├── .gitignore
├── LICENSE
├── README.md
├── dataset.py
├── models
│   ├── UNet.py
│   ├── VIT.py
│   ├── backbones
│   │   ├── PVTV2.py
│   │   ├── resnet.py
│   │   └── swin.py
│   ├── clustering
│   │   ├── pair_wise_distance.py
│   │   └── pair_wise_distance_cuda_source.py
│   ├── modeling.py
│   ├── pre_trained
│   │   └── pvt_v2_b1.pth
│   ├── spectrum_pixel_clustering.py
│   └── yamls
│       ├── PVTV2.yaml
│       ├── resnet.yaml
│       └── swin.yaml
├── requirements.txt
├── test.py
├── test.sh
├── train.py
├── train_and_test.sh
├── txts
│   ├── tr.txt
│   ├── ts.txt
│   └── val.txt
└── utils.py
--------------------------------------------------------------------------------
/.gitignore:
-------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 110 | .pdm.toml 111 | .pdm-python 112 | .pdm-build/ 113 | 114 | # PEP 582; used by e.g.
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 115 | __pypackages__/ 116 | 117 | # Celery stuff 118 | celerybeat-schedule 119 | celerybeat.pid 120 | 121 | # SageMath parsed files 122 | *.sage.py 123 | 124 | # Environments 125 | .env 126 | .venv 127 | env/ 128 | venv/ 129 | ENV/ 130 | env.bak/ 131 | venv.bak/ 132 | 133 | # Spyder project settings 134 | .spyderproject 135 | .spyproject 136 | 137 | # Rope project settings 138 | .ropeproject 139 | 140 | # mkdocs documentation 141 | /site 142 | 143 | # mypy 144 | .mypy_cache/ 145 | .dmypy.json 146 | dmypy.json 147 | 148 | # Pyre type checker 149 | .pyre/ 150 | 151 | # pytype static type analyzer 152 | .pytype/ 153 | 154 | # Cython debug symbols 155 | cython_debug/ 156 | 157 | # PyCharm 158 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 159 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 160 | # and can be added to the global gitignore or merged into this file. For a more nuclear 161 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 162 | #.idea/ 163 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 laprf 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Dual-stage Hyperspectral Image Classification Model with Spectral Supertoken [ECCV 2024] 2 | by [Peifu Liu](https://scholar.google.com/citations?user=yrRXe-8AAAAJ&hl=zh-CN), [Tingfa Xu](https://scholar.google.com/citations?user=vmDc8dwAAAAJ&hl=zh-CN), [Jie Wang](https://roywangj.github.io/), Huan Chen, Huiyan Bai, and [Jianan Li](https://scholar.google.com.hk/citations?user=sQ_nP0ZaMn0C&hl=zh-CN&oi=ao). 3 | 4 | [![arXiv](https://img.shields.io/badge/📃-arXiv-ff69b4)](https://arxiv.org/abs/2407.07307v1) 5 | [![Google Drive](https://img.shields.io/badge/Google_Drive-4285F4?logo=googledrive&logoColor=white)](https://drive.google.com/drive/folders/19UY5cgeXG03d56cj4CUSS85mr0rgR6Wt?usp=sharing) 6 | 7 | ## Requirements 8 | In this repository, we provide a `requirements.txt` file that lists all the dependencies. 
Additionally, a `.whl` installer for GDAL is provided on [Google Drive](https://drive.google.com/drive/folders/19UY5cgeXG03d56cj4CUSS85mr0rgR6Wt?usp=sharing) and can be installed directly with pip:
9 | ```bash
10 | pip install -r requirements.txt
11 | pip install GDAL-3.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl
12 | ```
13 | 
14 | ## Getting Started
15 | ### Preparation
16 | Please download the [WHU-OHS](https://irsip.whu.edu.cn/resources/resources_v2.php) dataset into `data`, which should be organized as follows:
17 | ```
18 | |--data
19 |     |--tr
20 |         |--image
21 |             |--O1_0001.tif
22 |             |--O1_0002.tif
23 |             |--...
24 |         |--label
25 |             |--O1_0001.tif
26 |             |--O1_0002.tif
27 |             |--...
28 |     |--ts
29 |         |--image
30 |             |--O1_0003.tif
31 |             |--O1_0004.tif
32 |             |--...
33 |         |--label
34 |             |--O1_0003.tif
35 |             |--O1_0004.tif
36 |             |--...
37 |     |--val
38 |         |--image
39 |             |--O1_0015.tif
40 |             |--O1_0042.tif
41 |             |--...
42 |         |--label
43 |             |--O1_0015.tif
44 |             |--O1_0042.tif
45 |             |--...
46 | ```
47 | 
48 | Our DSTC utilizes pre-trained backbone weights. The weights for ResNet and Swin will be downloaded automatically, while those for PVT can be downloaded from [Google Drive](https://drive.google.com/drive/folders/19UY5cgeXG03d56cj4CUSS85mr0rgR6Wt?usp=sharing). Please place them in the `models/pre_trained` folder.
49 | 
50 | ### Testing
51 | If you wish to validate our method, our trained model checkpoints are available on [Google Drive](https://drive.google.com/drive/folders/19UY5cgeXG03d56cj4CUSS85mr0rgR6Wt?usp=sharing). Please download them to the `models/checkpoints` folder. Then run:
52 | ```bash
53 | sh test.sh
54 | ```
55 | 
56 | ### Training
57 | To train our model, execute the `train_and_test.sh` script. Model checkpoints will be stored in the `DataStorage/` directory. After training, the script will proceed to test the model and save the visualization results.
58 | ```bash
59 | sh train_and_test.sh
60 | ```
61 | 
62 | 
63 | ## Acknowledgement
64 | We refer to the following repositories:
65 | - [Context Cluster](https://github.com/ma-xu/Context-Cluster)
66 | - [SPIN](https://github.com/ArcticHare105/SPIN)
67 | - [CVSSN](https://github.com/lms-07/CVSSN)
68 | 
69 | Thanks for their great work!
70 | 
71 | 
72 | ## License
73 | This project is released under the MIT License. See [LICENSE](LICENSE) for details.
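
## Data Sanity Check
As a quick way to confirm the `data/` layout above, the sketch below (not part of the original codebase) opens one image/label pair with GDAL and applies the same preprocessing as `WHU_OHS_Dataset.__getitem__` in `dataset.py`. The file names are only examples; use any pair from your download.
```python
# Minimal sanity-check sketch for the WHU-OHS layout described above.
# The paths below are illustrative; adjust them to your own download.
import torch
from osgeo import gdal

image_path = "data/tr/image/O1_0001.tif"
label_path = "data/tr/label/O1_0001.tif"

image_ds = gdal.Open(image_path, gdal.GA_ReadOnly)
label_ds = gdal.Open(label_path, gdal.GA_ReadOnly)
assert image_ds is not None and label_ds is not None, "GDAL could not open the files"

image = image_ds.ReadAsArray()  # (bands, H, W) raw digital numbers
label = label_ds.ReadAsArray()  # (H, W) class indices

# Same preprocessing as dataset.py: scale pixel values by 1/10000 and
# shift labels down by one (the raw labels appear to be 1-based).
image = torch.tensor(image, dtype=torch.float) / 10000.0
label = torch.tensor(label, dtype=torch.float) - 1.0

print("image:", tuple(image.shape),
      "label:", tuple(label.shape),
      "classes present:", sorted(label.unique().tolist()))
```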
-------------------------------------------------------------------------------- /dataset.py: -------------------------------------------------------------------------------- 1 | import os 2 | import torch 3 | from torch.utils.data import Dataset, DataLoader 4 | import numpy as np 5 | from osgeo import gdal 6 | 7 | IMG_EXTENSIONS = [ 8 | '.jpg', '.JPG', '.jpeg', '.JPEG', '.png', '.PNG', 9 | '.ppm', '.PPM', '.bmp', '.BMP', '.tif' 10 | ] 11 | 12 | 13 | class WHU_OHS_Dataset(Dataset): 14 | def __init__(self, image_file_list, label_file_list): 15 | self.image_file_list = image_file_list 16 | self.label_file_list = label_file_list 17 | 18 | def sample_stat(self): 19 | """Statistics of samples of each class in the dataset.""" 20 | sample_per_class = torch.zeros([24]) 21 | for label_file in self.label_file_list: 22 | label = gdal.Open(label_file, gdal.GA_ReadOnly) 23 | label = label.ReadAsArray() 24 | count = np.bincount(label.ravel(), minlength=25) 25 | count = count[1:25] 26 | count = torch.tensor(count) 27 | sample_per_class += count 28 | 29 | return sample_per_class 30 | 31 | def __len__(self): 32 | return len(self.image_file_list) 33 | 34 | def __getitem__(self, index): 35 | image_file = self.image_file_list[index] 36 | label_file = self.label_file_list[index] 37 | name = os.path.basename(image_file) 38 | image_dataset = gdal.Open(image_file, gdal.GA_ReadOnly) 39 | label_dataset = gdal.Open(label_file, gdal.GA_ReadOnly) 40 | 41 | image = image_dataset.ReadAsArray() 42 | label = label_dataset.ReadAsArray() 43 | image = torch.tensor(image, dtype=torch.float) / 10000.0 44 | label = torch.tensor(label, dtype=torch.float) - 1.0 45 | return image, label, name.split(".")[0] 46 | 47 | def get_dataset_loader(txt_file_path, data_path, batch_size, shuffle): 48 | image_list = [] 49 | label_list = [] 50 | 51 | with open(txt_file_path, 'r') as f: 52 | lines = f.readlines() 53 | for line in lines: 54 | line = line.strip().split(',') 55 | image_path = os.path.join(data_path, line[0] + '.tif') 56 | label_path = os.path.join(data_path.replace('image', 'label'), line[0] + '.tif') 57 | 58 | assert os.path.exists(label_path), f"{label_path} does not exist!" 59 | assert os.path.exists(image_path), f"{image_path} does not exist!" 60 | 61 | image_list.append(image_path) 62 | label_list.append(label_path) 63 | 64 | assert len(image_list) == len(label_list), "The number of images and labels must be equal!" 65 | 66 | dataset = WHU_OHS_Dataset( 67 | image_file_list=image_list, 68 | label_file_list=label_list, 69 | ) 70 | loader = DataLoader( 71 | dataset, 72 | batch_size=batch_size, 73 | shuffle=shuffle, 74 | num_workers=4, 75 | prefetch_factor=4, 76 | persistent_workers=True, 77 | pin_memory=True 78 | ) 79 | return loader 80 | 81 | 82 | def load_data(args, mode='tr'): 83 | assert mode in ['tr', 'val', 'ts'], "Invalid mode. Mode should be either 'tr', 'val' or 'ts'." 
84 | data_path = os.path.join(args.data_root, mode, 'image') 85 | is_shuffle = True if mode == 'tr' else False 86 | txt_file_path = os.path.join("txts/", mode + ".txt") 87 | loader = get_dataset_loader( 88 | txt_file_path, 89 | data_path, 90 | args.batch_size, 91 | is_shuffle, 92 | ) 93 | return loader 94 | -------------------------------------------------------------------------------- /models/UNet.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | 5 | 6 | class DoubleConv(nn.Module): 7 | """(convolution => [BN] => ReLU) * 2""" 8 | 9 | def __init__(self, in_channels, out_channels, mid_channels=None): 10 | super().__init__() 11 | if not mid_channels: 12 | mid_channels = out_channels 13 | self.double_conv = nn.Sequential( 14 | nn.Conv2d(in_channels, mid_channels, kernel_size=3, padding=1, bias=False), 15 | nn.BatchNorm2d(mid_channels), 16 | nn.ReLU(inplace=True), 17 | nn.Conv2d(mid_channels, out_channels, kernel_size=3, padding=1, bias=False), 18 | nn.BatchNorm2d(out_channels), 19 | nn.ReLU(inplace=True) 20 | ) 21 | 22 | def forward(self, x): 23 | return self.double_conv(x) 24 | 25 | 26 | class Down(nn.Module): 27 | """Downscaling with maxpool then double conv""" 28 | 29 | def __init__(self, in_channels, out_channels): 30 | super().__init__() 31 | self.maxpool_conv = nn.Sequential( 32 | nn.MaxPool2d(2), 33 | DoubleConv(in_channels, out_channels) 34 | ) 35 | 36 | def forward(self, x): 37 | return self.maxpool_conv(x) 38 | 39 | 40 | class Up(nn.Module): 41 | """Upscaling then double conv""" 42 | 43 | def __init__(self, in_channels, out_channels, bilinear=True): 44 | super().__init__() 45 | 46 | # if bilinear, use the normal convolutions to reduce the number of channels 47 | if bilinear: 48 | self.up = nn.Upsample(scale_factor=2, mode='nearest') 49 | self.conv = DoubleConv(in_channels, out_channels, in_channels // 2) 50 | else: 51 | self.up = nn.ConvTranspose2d(in_channels, in_channels // 2, kernel_size=2, stride=2) 52 | self.conv = DoubleConv(in_channels, out_channels) 53 | 54 | def forward(self, x1, x2): 55 | x1 = self.up(x1) 56 | # input is CHW 57 | diffY = x2.size()[2] - x1.size()[2] 58 | diffX = x2.size()[3] - x1.size()[3] 59 | 60 | x1 = F.pad(x1, [diffX // 2, diffX - diffX // 2, 61 | diffY // 2, diffY - diffY // 2]) 62 | # if you have padding issues, see 63 | # https://github.com/HaiyongJiang/U-Net-Pytorch-Unstructured-Buggy/commit/0e854509c2cea854e247a9c615f175f76fbb2e3a 64 | # https://github.com/xiaopeng-liao/Pytorch-UNet/commit/8ebac70e633bac59fc22bb5195e513d5832fb3bd 65 | x = torch.cat([x2, x1], dim=1) 66 | return self.conv(x) 67 | 68 | 69 | class OutConv(nn.Module): 70 | def __init__(self, in_channels, out_channels): 71 | super(OutConv, self).__init__() 72 | self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1) 73 | 74 | def forward(self, x): 75 | return self.conv(x) 76 | 77 | 78 | class UNet(nn.Module): 79 | def __init__(self, n_channels, n_classes, bilinear=False): 80 | super(UNet, self).__init__() 81 | self.n_channels = n_channels 82 | self.n_classes = n_classes 83 | self.bilinear = bilinear 84 | 85 | self.inc = (DoubleConv(n_channels, 64)) 86 | self.down1 = (Down(64, 128)) 87 | self.down2 = (Down(128, 256)) 88 | # self.down3 = (Down(256, 512)) 89 | factor = 2 if bilinear else 1 90 | # self.down4 = (Down(512, 1024 // factor)) 91 | # self.up1 = (Up(1024, 512 // factor, bilinear)) 92 | # self.up2 = (Up(512, 256 // factor, bilinear)) 93 | self.up3 = (Up(256, 
128 // factor, bilinear)) 94 | self.up4 = (Up(128, 64, bilinear)) 95 | self.outc = (OutConv(64, n_classes)) 96 | 97 | def forward(self, x): 98 | x1 = self.inc(x) 99 | x2 = self.down1(x1) 100 | x3 = self.down2(x2) 101 | # x4 = self.down3(x3) 102 | # x5 = self.down4(x4) 103 | # x = self.up1(x5, x4) 104 | # x = self.up2(x, x3) 105 | x = self.up3(x3, x2) 106 | x = self.up4(x, x1) 107 | logits = self.outc(x) 108 | return logits 109 | -------------------------------------------------------------------------------- /models/VIT.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from einops import rearrange, repeat 3 | from torch import nn 4 | 5 | 6 | def pair(t): 7 | return t if isinstance(t, tuple) else (t, t) 8 | 9 | 10 | # classes 11 | 12 | class FeedForward(nn.Module): 13 | def __init__(self, dim, hidden_dim, dropout=0.): 14 | super().__init__() 15 | self.net = nn.Sequential( 16 | nn.LayerNorm(dim), 17 | nn.Linear(dim, hidden_dim), 18 | nn.GELU(), 19 | nn.Dropout(dropout), 20 | nn.Linear(hidden_dim, dim), 21 | nn.Dropout(dropout) 22 | ) 23 | 24 | def forward(self, x): 25 | return self.net(x) 26 | 27 | 28 | class Attention(nn.Module): 29 | def __init__(self, dim, heads=8, dim_head=64, dropout=0.): 30 | super().__init__() 31 | inner_dim = dim_head * heads 32 | project_out = not (heads == 1 and dim_head == dim) 33 | 34 | self.heads = heads 35 | self.scale = dim_head ** -0.5 36 | 37 | self.norm = nn.LayerNorm(dim) 38 | 39 | self.attend = nn.Softmax(dim=-1) 40 | self.dropout = nn.Dropout(dropout) 41 | 42 | self.to_qkv = nn.Linear(dim, inner_dim * 3, bias=False) 43 | 44 | self.to_out = nn.Sequential( 45 | nn.Linear(inner_dim, dim), 46 | nn.Dropout(dropout) 47 | ) if project_out else nn.Identity() 48 | 49 | def forward(self, x): 50 | x = self.norm(x) 51 | 52 | qkv = self.to_qkv(x).chunk(3, dim=-1) 53 | q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h=self.heads), qkv) 54 | 55 | dots = torch.matmul(q, k.transpose(-1, -2)) * self.scale 56 | 57 | attn = self.attend(dots) 58 | attn = self.dropout(attn) 59 | 60 | out = torch.matmul(attn, v) 61 | out = rearrange(out, 'b h n d -> b n (h d)') 62 | return self.to_out(out) 63 | 64 | 65 | class Transformer(nn.Module): 66 | def __init__(self, dim, depth, heads, dim_head, mlp_dim, dropout=0.): 67 | super().__init__() 68 | self.norm = nn.LayerNorm(dim) 69 | self.layers = nn.ModuleList([]) 70 | for _ in range(depth): 71 | self.layers.append(nn.ModuleList([ 72 | Attention(dim, heads=heads, dim_head=dim_head, dropout=dropout), 73 | FeedForward(dim, mlp_dim, dropout=dropout) 74 | ])) 75 | 76 | def forward(self, x): 77 | for attn, ff in self.layers: 78 | attn_result = attn(x) 79 | x = attn_result + x 80 | x = ff(x) + x 81 | return self.norm(x) 82 | 83 | 84 | class ViT(nn.Module): 85 | def __init__(self, *, image_size, patch_size, num_classes, dim, depth, heads, mlp_dim, channels=3, 86 | dim_head=64, dropout=0., emb_dropout=0.): 87 | super().__init__() 88 | image_height, image_width = pair(image_size) 89 | patch_height, patch_width = pair(patch_size) 90 | 91 | assert image_height % patch_height == 0 and image_width % patch_width == 0, ( 92 | 'Image dimensions must be divisible by the patch size.') 93 | 94 | num_patches = (image_height // patch_height) * (image_width // patch_width) 95 | patch_dim = channels * patch_height * patch_width 96 | 97 | self.to_patch_embedding = nn.Sequential( 98 | # Rearrange('b c (h p1) (w p2) -> b (h w) (p1 p2 c)', p1=patch_height, p2=patch_width), 99 | 
nn.LayerNorm(patch_dim), 100 | nn.Linear(patch_dim, dim), 101 | nn.LayerNorm(dim), 102 | ) 103 | 104 | self.pos_embedding = nn.Parameter(torch.randn(1, num_patches + 1, dim)) 105 | 106 | self.cls_token = nn.Parameter(torch.randn(1, 1, dim)) 107 | self.dropout = nn.Dropout(emb_dropout) 108 | 109 | self.transformer = Transformer(dim, depth, heads, dim_head, mlp_dim, dropout) 110 | 111 | self.to_latent = nn.Identity() 112 | 113 | self.mlp_head = nn.Linear(dim, num_classes) 114 | 115 | def forward(self, img): 116 | x = self.to_patch_embedding(img) 117 | b, n, _ = x.shape 118 | cls_tokens = repeat(self.cls_token, '1 1 d -> b 1 d', b=b) 119 | x = torch.cat((cls_tokens, x), dim=1) 120 | x += self.pos_embedding[:, :(n + 1)] 121 | x = self.dropout(x) 122 | 123 | x = self.transformer(x) 124 | x = x[:, 1:, :] 125 | x = self.to_latent(x) 126 | return self.mlp_head(x) 127 | -------------------------------------------------------------------------------- /models/backbones/PVTV2.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | from functools import partial 4 | 5 | from timm.models.layers import DropPath, to_2tuple, trunc_normal_ 6 | from timm.models.vision_transformer import _cfg 7 | import math 8 | 9 | from mmengine.runner import load_checkpoint 10 | 11 | 12 | class Mlp(nn.Module): 13 | def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0., linear=False): 14 | super().__init__() 15 | out_features = out_features or in_features 16 | hidden_features = hidden_features or in_features 17 | self.fc1 = nn.Linear(in_features, hidden_features) 18 | self.dwconv = DWConv(hidden_features) 19 | self.act = act_layer() 20 | self.fc2 = nn.Linear(hidden_features, out_features) 21 | self.drop = nn.Dropout(drop) 22 | self.linear = linear 23 | if self.linear: 24 | self.relu = nn.ReLU(inplace=True) 25 | self.apply(self._init_weights) 26 | 27 | def _init_weights(self, m): 28 | if isinstance(m, nn.Linear): 29 | trunc_normal_(m.weight, std=.02) 30 | if isinstance(m, nn.Linear) and m.bias is not None: 31 | nn.init.constant_(m.bias, 0) 32 | elif isinstance(m, nn.LayerNorm): 33 | nn.init.constant_(m.bias, 0) 34 | nn.init.constant_(m.weight, 1.0) 35 | elif isinstance(m, nn.Conv2d): 36 | fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels 37 | fan_out //= m.groups 38 | m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) 39 | if m.bias is not None: 40 | m.bias.data.zero_() 41 | 42 | def forward(self, x, H, W): 43 | x = self.fc1(x) 44 | if self.linear: 45 | x = self.relu(x) 46 | x = self.dwconv(x, H, W) 47 | x = self.act(x) 48 | x = self.drop(x) 49 | x = self.fc2(x) 50 | x = self.drop(x) 51 | return x 52 | 53 | 54 | class Attention(nn.Module): 55 | def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0., sr_ratio=1, linear=False): 56 | super().__init__() 57 | assert dim % num_heads == 0, f"dim {dim} should be divided by num_heads {num_heads}." 
58 | 59 | self.dim = dim 60 | self.num_heads = num_heads 61 | head_dim = dim // num_heads 62 | self.scale = qk_scale or head_dim ** -0.5 63 | 64 | self.q = nn.Linear(dim, dim, bias=qkv_bias) 65 | self.kv = nn.Linear(dim, dim * 2, bias=qkv_bias) 66 | self.attn_drop = nn.Dropout(attn_drop) 67 | self.proj = nn.Linear(dim, dim) 68 | self.proj_drop = nn.Dropout(proj_drop) 69 | 70 | self.linear = linear 71 | self.sr_ratio = sr_ratio 72 | if not linear: 73 | if sr_ratio > 1: 74 | self.sr = nn.Conv2d(dim, dim, kernel_size=sr_ratio, stride=sr_ratio) 75 | self.norm = nn.LayerNorm(dim) 76 | else: 77 | self.pool = nn.AdaptiveAvgPool2d(7) 78 | self.sr = nn.Conv2d(dim, dim, kernel_size=1, stride=1) 79 | self.norm = nn.LayerNorm(dim) 80 | self.act = nn.GELU() 81 | self.apply(self._init_weights) 82 | 83 | def _init_weights(self, m): 84 | if isinstance(m, nn.Linear): 85 | trunc_normal_(m.weight, std=.02) 86 | if isinstance(m, nn.Linear) and m.bias is not None: 87 | nn.init.constant_(m.bias, 0) 88 | elif isinstance(m, nn.LayerNorm): 89 | nn.init.constant_(m.bias, 0) 90 | nn.init.constant_(m.weight, 1.0) 91 | elif isinstance(m, nn.Conv2d): 92 | fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels 93 | fan_out //= m.groups 94 | m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) 95 | if m.bias is not None: 96 | m.bias.data.zero_() 97 | 98 | def forward(self, x, H, W): 99 | B, N, C = x.shape 100 | q = self.q(x).reshape(B, N, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3) 101 | 102 | if not self.linear: 103 | if self.sr_ratio > 1: 104 | x_ = x.permute(0, 2, 1).reshape(B, C, H, W) 105 | x_ = self.sr(x_).reshape(B, C, -1).permute(0, 2, 1) 106 | x_ = self.norm(x_) 107 | kv = self.kv(x_).reshape(B, -1, 2, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) 108 | else: 109 | kv = self.kv(x).reshape(B, -1, 2, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) 110 | else: 111 | x_ = x.permute(0, 2, 1).reshape(B, C, H, W) 112 | x_ = self.sr(self.pool(x_)).reshape(B, C, -1).permute(0, 2, 1) 113 | x_ = self.norm(x_) 114 | x_ = self.act(x_) 115 | kv = self.kv(x_).reshape(B, -1, 2, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) 116 | k, v = kv[0], kv[1] 117 | 118 | attn = (q @ k.transpose(-2, -1)) * self.scale 119 | attn = attn.softmax(dim=-1) 120 | attn = self.attn_drop(attn) 121 | 122 | x = (attn @ v).transpose(1, 2).reshape(B, N, C) 123 | x = self.proj(x) 124 | x = self.proj_drop(x) 125 | 126 | return x 127 | 128 | 129 | class Block(nn.Module): 130 | 131 | def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0., 132 | drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm, sr_ratio=1, linear=False): 133 | super().__init__() 134 | self.norm1 = norm_layer(dim) 135 | self.attn = Attention( 136 | dim, 137 | num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, 138 | attn_drop=attn_drop, proj_drop=drop, sr_ratio=sr_ratio, linear=linear) 139 | # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here 140 | self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() 141 | self.norm2 = norm_layer(dim) 142 | mlp_hidden_dim = int(dim * mlp_ratio) 143 | self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop, linear=linear) 144 | 145 | self.apply(self._init_weights) 146 | 147 | def _init_weights(self, m): 148 | if isinstance(m, nn.Linear): 149 | trunc_normal_(m.weight, std=.02) 150 | if isinstance(m, nn.Linear) and m.bias is not None: 151 | nn.init.constant_(m.bias, 0) 152 | elif isinstance(m, nn.LayerNorm): 153 | nn.init.constant_(m.bias, 0) 154 | nn.init.constant_(m.weight, 1.0) 155 | elif isinstance(m, nn.Conv2d): 156 | fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels 157 | fan_out //= m.groups 158 | m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) 159 | if m.bias is not None: 160 | m.bias.data.zero_() 161 | 162 | def forward(self, x, H, W): 163 | x = x + self.drop_path(self.attn(self.norm1(x), H, W)) 164 | x = x + self.drop_path(self.mlp(self.norm2(x), H, W)) 165 | 166 | return x 167 | 168 | 169 | class OverlapPatchEmbed(nn.Module): 170 | """ Image to Patch Embedding 171 | """ 172 | 173 | def __init__(self, img_size=224, patch_size=7, stride=4, in_chans=3, embed_dim=768): 174 | super().__init__() 175 | 176 | img_size = to_2tuple(img_size) 177 | patch_size = to_2tuple(patch_size) 178 | 179 | assert max(patch_size) > stride, "Set larger patch_size than stride" 180 | 181 | self.img_size = img_size 182 | self.patch_size = patch_size 183 | self.H, self.W = img_size[0] // stride, img_size[1] // stride 184 | self.num_patches = self.H * self.W 185 | self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=stride, 186 | padding=(patch_size[0] // 2, patch_size[1] // 2)) 187 | self.norm = nn.LayerNorm(embed_dim) 188 | 189 | self.apply(self._init_weights) 190 | 191 | def _init_weights(self, m): 192 | if isinstance(m, nn.Linear): 193 | trunc_normal_(m.weight, std=.02) 194 | if isinstance(m, nn.Linear) and m.bias is not None: 195 | nn.init.constant_(m.bias, 0) 196 | elif isinstance(m, nn.LayerNorm): 197 | nn.init.constant_(m.bias, 0) 198 | nn.init.constant_(m.weight, 1.0) 199 | elif isinstance(m, nn.Conv2d): 200 | fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels 201 | fan_out //= m.groups 202 | m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) 203 | if m.bias is not None: 204 | m.bias.data.zero_() 205 | 206 | def forward(self, x): 207 | x = self.proj(x) 208 | _, _, H, W = x.shape 209 | x = x.flatten(2).transpose(1, 2) 210 | x = self.norm(x) 211 | 212 | return x, H, W 213 | 214 | 215 | class PyramidVisionTransformerV2(nn.Module): 216 | def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dims=[64, 128, 256, 512], 217 | num_heads=[1, 2, 4, 8], mlp_ratios=[4, 4, 4, 4], qkv_bias=False, qk_scale=None, drop_rate=0., 218 | attn_drop_rate=0., drop_path_rate=0., norm_layer=nn.LayerNorm, 219 | depths=[3, 4, 6, 3], sr_ratios=[8, 4, 2, 1], num_stages=3, linear=False, pre_trained_path=None,): 220 | super().__init__() 221 | self.num_classes = num_classes 222 | self.depths = depths 223 | self.num_stages = num_stages 224 | 225 | dpr = [x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))] # stochastic depth decay rule 226 | cur = 0 227 | 228 | for i in range(num_stages): 229 | patch_embed = OverlapPatchEmbed(img_size=img_size if i == 0 else img_size // (2 ** (i + 1)), 230 | patch_size=7 if i == 0 else 3, 231 | stride=4 if i == 0 else 2, 232 | in_chans=in_chans if i == 0 else embed_dims[i - 1], 233 | embed_dim=embed_dims[i]) 234 | 235 
| block = nn.ModuleList([Block( 236 | dim=embed_dims[i], num_heads=num_heads[i], mlp_ratio=mlp_ratios[i], qkv_bias=qkv_bias, qk_scale=qk_scale, 237 | drop=drop_rate, attn_drop=attn_drop_rate, drop_path=dpr[cur + j], norm_layer=norm_layer, 238 | sr_ratio=sr_ratios[i], linear=linear) 239 | for j in range(depths[i])]) 240 | norm = norm_layer(embed_dims[i]) 241 | cur += depths[i] 242 | 243 | setattr(self, f"patch_embed{i + 1}", patch_embed) 244 | setattr(self, f"block{i + 1}", block) 245 | setattr(self, f"norm{i + 1}", norm) 246 | 247 | # classification head 248 | # self.head = nn.Linear(embed_dims[3], num_classes) if num_classes > 0 else nn.Identity() 249 | 250 | load_checkpoint(self, pre_trained_path, map_location='cpu', strict=False) 251 | 252 | def _init_weights(self, m): 253 | if isinstance(m, nn.Linear): 254 | trunc_normal_(m.weight, std=.02) 255 | if isinstance(m, nn.Linear) and m.bias is not None: 256 | nn.init.constant_(m.bias, 0) 257 | elif isinstance(m, nn.LayerNorm): 258 | nn.init.constant_(m.bias, 0) 259 | nn.init.constant_(m.weight, 1.0) 260 | elif isinstance(m, nn.Conv2d): 261 | fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels 262 | fan_out //= m.groups 263 | m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) 264 | if m.bias is not None: 265 | m.bias.data.zero_() 266 | 267 | def freeze_patch_emb(self): 268 | self.patch_embed1.requires_grad = False 269 | 270 | @torch.jit.ignore 271 | def no_weight_decay(self): 272 | return {'pos_embed1', 'pos_embed2', 'pos_embed3', 'pos_embed4', 'cls_token'} # has pos_embed may be better 273 | 274 | def get_classifier(self): 275 | return self.head 276 | 277 | def reset_classifier(self, num_classes, global_pool=''): 278 | self.num_classes = num_classes 279 | self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity() 280 | 281 | def forward_features(self, x): 282 | B = x.shape[0] 283 | out = [] 284 | 285 | for i in range(self.num_stages): 286 | patch_embed = getattr(self, f"patch_embed{i + 1}") 287 | block = getattr(self, f"block{i + 1}") 288 | norm = getattr(self, f"norm{i + 1}") 289 | x, H, W = patch_embed(x) 290 | for blk in block: 291 | x = blk(x, H, W) 292 | x = norm(x) 293 | if i != self.num_stages - 1: 294 | x = x.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous() 295 | 296 | out.append(x) 297 | 298 | return x.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous(), out[:-1] 299 | 300 | def forward(self, x): 301 | x, out = self.forward_features(x) 302 | # x = self.head(x) 303 | 304 | return x, out 305 | 306 | 307 | class DWConv(nn.Module): 308 | def __init__(self, dim=768): 309 | super(DWConv, self).__init__() 310 | self.dwconv = nn.Conv2d(dim, dim, 3, 1, 1, bias=True, groups=dim) 311 | 312 | def forward(self, x, H, W): 313 | B, N, C = x.shape 314 | x = x.transpose(1, 2).view(B, C, H, W) 315 | x = self.dwconv(x) 316 | x = x.flatten(2).transpose(1, 2) 317 | 318 | return x 319 | 320 | 321 | def _conv_filter(state_dict, patch_size=16): 322 | """ convert patch embedding weight from manual patchify + linear proj to conv""" 323 | out_dict = {} 324 | for k, v in state_dict.items(): 325 | if 'patch_embed.proj.weight' in k: 326 | v = v.reshape((v.shape[0], 3, patch_size, patch_size)) 327 | out_dict[k] = v 328 | 329 | return out_dict 330 | 331 | 332 | # @register_model 333 | def pvt_v2_b1(**kwargs): 334 | model = PyramidVisionTransformerV2( 335 | patch_size=4, embed_dims=[64, 128, 320], num_heads=[1, 2, 5], mlp_ratios=[8, 8, 4], qkv_bias=True, 336 | norm_layer=partial(nn.LayerNorm, eps=1e-6), depths=[2, 2, 
2], sr_ratios=[8, 4, 2], 337 | **kwargs) 338 | model.default_cfg = _cfg() 339 | 340 | return model 341 | -------------------------------------------------------------------------------- /models/backbones/resnet.py: -------------------------------------------------------------------------------- 1 | from typing import Type, Any, Callable, Union, List, Optional 2 | 3 | import torch.nn as nn 4 | from torch import Tensor 5 | 6 | from torchvision.utils import _log_api_usage_once 7 | from torchvision._internally_replaced_utils import load_state_dict_from_url 8 | 9 | __all__ = [ 10 | "ResNet", 11 | "resnet18", 12 | "resnet34", 13 | "resnet50", 14 | ] 15 | 16 | 17 | model_urls = { 18 | "resnet18": "https://download.pytorch.org/models/resnet18-f37072fd.pth", 19 | "resnet34": "https://download.pytorch.org/models/resnet34-b627a593.pth", 20 | "resnet50": "https://download.pytorch.org/models/resnet50-0676ba61.pth", 21 | } 22 | 23 | 24 | def conv3x3(in_planes: int, out_planes: int, stride: int = 1, groups: int = 1, dilation: int = 1) -> nn.Conv2d: 25 | """3x3 convolution with padding""" 26 | return nn.Conv2d( 27 | in_planes, 28 | out_planes, 29 | kernel_size=3, 30 | stride=stride, 31 | padding=dilation, 32 | groups=groups, 33 | bias=False, 34 | dilation=dilation, 35 | ) 36 | 37 | 38 | def conv1x1(in_planes: int, out_planes: int, stride: int = 1) -> nn.Conv2d: 39 | """1x1 convolution""" 40 | return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False) 41 | 42 | 43 | class BasicBlock(nn.Module): 44 | expansion: int = 1 45 | 46 | def __init__( 47 | self, 48 | inplanes: int, 49 | planes: int, 50 | stride: int = 1, 51 | downsample: Optional[nn.Module] = None, 52 | groups: int = 1, 53 | base_width: int = 64, 54 | dilation: int = 1, 55 | norm_layer: Optional[Callable[..., nn.Module]] = None, 56 | ) -> None: 57 | super().__init__() 58 | if norm_layer is None: 59 | norm_layer = nn.BatchNorm2d 60 | if groups != 1 or base_width != 64: 61 | raise ValueError("BasicBlock only supports groups=1 and base_width=64") 62 | if dilation > 1: 63 | raise NotImplementedError("Dilation > 1 not supported in BasicBlock") 64 | # Both self.conv1 and self.downsample layers downsample the input when stride != 1 65 | self.conv1 = conv3x3(inplanes, planes, stride) 66 | self.bn1 = norm_layer(planes) 67 | self.relu = nn.ReLU(inplace=True) 68 | self.conv2 = conv3x3(planes, planes) 69 | self.bn2 = norm_layer(planes) 70 | self.downsample = downsample 71 | self.stride = stride 72 | 73 | def forward(self, x: Tensor) -> Tensor: 74 | identity = x 75 | 76 | out = self.conv1(x) 77 | out = self.bn1(out) 78 | out = self.relu(out) 79 | 80 | out = self.conv2(out) 81 | out = self.bn2(out) 82 | 83 | if self.downsample is not None: 84 | identity = self.downsample(x) 85 | 86 | out += identity 87 | out = self.relu(out) 88 | 89 | return out 90 | 91 | 92 | class Bottleneck(nn.Module): 93 | # Bottleneck in torchvision places the stride for downsampling at 3x3 convolution(self.conv2) 94 | # while original implementation places the stride at the first 1x1 convolution(self.conv1) 95 | # according to "Deep residual learning for image recognition"https://arxiv.org/abs/1512.03385. 96 | # This variant is also known as ResNet V1.5 and improves accuracy according to 97 | # https://ngc.nvidia.com/catalog/model-scripts/nvidia:resnet_50_v1_5_for_pytorch. 
98 | 99 | expansion: int = 4 100 | 101 | def __init__( 102 | self, 103 | inplanes: int, 104 | planes: int, 105 | stride: int = 1, 106 | downsample: Optional[nn.Module] = None, 107 | groups: int = 1, 108 | base_width: int = 64, 109 | dilation: int = 1, 110 | norm_layer: Optional[Callable[..., nn.Module]] = None, 111 | ) -> None: 112 | super().__init__() 113 | if norm_layer is None: 114 | norm_layer = nn.BatchNorm2d 115 | width = int(planes * (base_width / 64.0)) * groups 116 | # Both self.conv2 and self.downsample layers downsample the input when stride != 1 117 | self.conv1 = conv1x1(inplanes, width) 118 | self.bn1 = norm_layer(width) 119 | self.conv2 = conv3x3(width, width, stride, groups, dilation) 120 | self.bn2 = norm_layer(width) 121 | self.conv3 = conv1x1(width, planes * self.expansion) 122 | self.bn3 = norm_layer(planes * self.expansion) 123 | self.relu = nn.ReLU(inplace=True) 124 | self.downsample = downsample 125 | self.stride = stride 126 | 127 | def forward(self, x: Tensor) -> Tensor: 128 | identity = x 129 | 130 | out = self.conv1(x) 131 | out = self.bn1(out) 132 | out = self.relu(out) 133 | 134 | out = self.conv2(out) 135 | out = self.bn2(out) 136 | out = self.relu(out) 137 | 138 | out = self.conv3(out) 139 | out = self.bn3(out) 140 | 141 | if self.downsample is not None: 142 | identity = self.downsample(x) 143 | 144 | out += identity 145 | out = self.relu(out) 146 | 147 | return out 148 | 149 | 150 | class ResNet(nn.Module): 151 | def __init__( 152 | self, 153 | block: Type[Union[BasicBlock, Bottleneck]], 154 | layers: List[int], 155 | in_channels: int = 3, 156 | zero_init_residual: bool = False, 157 | groups: int = 1, 158 | width_per_group: int = 64, 159 | replace_stride_with_dilation: Optional[List[bool]] = None, 160 | norm_layer: Optional[Callable[..., nn.Module]] = None, 161 | ) -> None: 162 | super().__init__() 163 | _log_api_usage_once(self) 164 | if norm_layer is None: 165 | norm_layer = nn.BatchNorm2d 166 | self._norm_layer = norm_layer 167 | 168 | self.inplanes = 64 169 | self.dilation = 1 170 | if replace_stride_with_dilation is None: 171 | # each element in the tuple indicates if we should replace 172 | # the 2x2 stride with a dilated convolution instead 173 | replace_stride_with_dilation = [False, False, False] 174 | if len(replace_stride_with_dilation) != 3: 175 | raise ValueError( 176 | "replace_stride_with_dilation should be None " 177 | f"or a 3-element tuple, got {replace_stride_with_dilation}" 178 | ) 179 | self.groups = groups 180 | self.base_width = width_per_group 181 | self.conv1 = nn.Conv2d(in_channels, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False) 182 | self.bn1 = norm_layer(self.inplanes) 183 | self.relu = nn.ReLU(inplace=True) 184 | self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) 185 | self.layer1 = self._make_layer(block, 64, layers[0]) 186 | self.layer2 = self._make_layer(block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]) 187 | # self.layer3 = self._make_layer(block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]) 188 | # self.layer4 = self._make_layer(block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]) 189 | # self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) 190 | # self.fc = nn.Linear(512 * block.expansion, num_classes) 191 | 192 | for m in self.modules(): 193 | if isinstance(m, nn.Conv2d): 194 | nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu") 195 | elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): 196 | nn.init.constant_(m.weight, 1) 
197 | nn.init.constant_(m.bias, 0) 198 | 199 | # Zero-initialize the last BN in each residual branch, 200 | # so that the residual branch starts with zeros, and each residual block behaves like an identity. 201 | # This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677 202 | if zero_init_residual: 203 | for m in self.modules(): 204 | if isinstance(m, Bottleneck): 205 | nn.init.constant_(m.bn3.weight, 0) # type: ignore[arg-type] 206 | elif isinstance(m, BasicBlock): 207 | nn.init.constant_(m.bn2.weight, 0) # type: ignore[arg-type] 208 | 209 | def _make_layer( 210 | self, 211 | block: Type[Union[BasicBlock, Bottleneck]], 212 | planes: int, 213 | blocks: int, 214 | stride: int = 1, 215 | dilate: bool = False, 216 | ) -> nn.Sequential: 217 | norm_layer = self._norm_layer 218 | downsample = None 219 | previous_dilation = self.dilation 220 | if dilate: 221 | self.dilation *= stride 222 | stride = 1 223 | if stride != 1 or self.inplanes != planes * block.expansion: 224 | downsample = nn.Sequential( 225 | conv1x1(self.inplanes, planes * block.expansion, stride), 226 | norm_layer(planes * block.expansion), 227 | ) 228 | 229 | layers = [] 230 | layers.append( 231 | block( 232 | self.inplanes, planes, stride, downsample, self.groups, self.base_width, previous_dilation, norm_layer 233 | ) 234 | ) 235 | self.inplanes = planes * block.expansion 236 | for _ in range(1, blocks): 237 | layers.append( 238 | block( 239 | self.inplanes, 240 | planes, 241 | groups=self.groups, 242 | base_width=self.base_width, 243 | dilation=self.dilation, 244 | norm_layer=norm_layer, 245 | ) 246 | ) 247 | 248 | return nn.Sequential(*layers) 249 | 250 | def _forward_impl(self, x: Tensor) -> Tensor: 251 | # See note [TorchScript super()] 252 | out = [] 253 | x = self.conv1(x) 254 | x = self.bn1(x) 255 | x = self.relu(x) 256 | out.append(x) 257 | x = self.maxpool(x) 258 | 259 | x = self.layer1(x) 260 | out.append(x) 261 | x = self.layer2(x) 262 | # x = self.layer3(x) 263 | # x = self.layer4(x) 264 | 265 | # x = self.avgpool(x) 266 | # x = torch.flatten(x, 1) 267 | # x = self.fc(x) 268 | 269 | return x, out 270 | 271 | def forward(self, x: Tensor) -> Tensor: 272 | return self._forward_impl(x) 273 | 274 | 275 | def _resnet( 276 | arch: str, 277 | block: Type[Union[BasicBlock, Bottleneck]], 278 | layers: List[int], 279 | pretrained: bool, 280 | progress: bool, 281 | **kwargs: Any, 282 | ) -> ResNet: 283 | model = ResNet(block, layers, **kwargs) 284 | if pretrained: 285 | state_dict = load_state_dict_from_url(model_urls[arch], progress=progress) 286 | model_dict = model.state_dict() 287 | for k, v in state_dict.items(): 288 | if k == 'conv1.weight' and kwargs['in_channels'] != 3: 289 | print(f"in_channels = {kwargs['in_channels']}, jumping conv1.weight!") 290 | continue 291 | 292 | if k in model_dict: 293 | model_dict[k] = v 294 | 295 | model.load_state_dict(model_dict) 296 | return model 297 | 298 | 299 | def resnet18(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> ResNet: 300 | r"""ResNet-18 model from 301 | `"Deep Residual Learning for Image Recognition" `_. 
302 | 303 | Args: 304 | pretrained (bool): If True, returns a model pre-trained on ImageNet 305 | progress (bool): If True, displays a progress bar of the download to stderr 306 | """ 307 | return _resnet("resnet18", BasicBlock, [2, 2, 2, 2], pretrained, progress, **kwargs) 308 | 309 | def resnet34(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> ResNet: 310 | r"""ResNet-34 model from 311 | `"Deep Residual Learning for Image Recognition" `_. 312 | 313 | Args: 314 | pretrained (bool): If True, returns a model pre-trained on ImageNet 315 | progress (bool): If True, displays a progress bar of the download to stderr 316 | """ 317 | return _resnet("resnet34", BasicBlock, [3, 4, 6, 3], pretrained, progress, **kwargs) 318 | 319 | 320 | def resnet50(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> ResNet: 321 | r"""ResNet-50 model from 322 | `"Deep Residual Learning for Image Recognition" `_. 323 | 324 | Args: 325 | pretrained (bool): If True, returns a model pre-trained on ImageNet 326 | progress (bool): If True, displays a progress bar of the download to stderr 327 | """ 328 | return _resnet("resnet50", Bottleneck, [3, 4, 6, 3], pretrained, progress, **kwargs) -------------------------------------------------------------------------------- /models/backbones/swin.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from typing import Optional, Callable, List, Any 3 | 4 | import torch 5 | import torch.nn.functional as F 6 | from torch import nn, Tensor 7 | 8 | from torchvision.ops.misc import MLP, Permute 9 | from torchvision.ops.stochastic_depth import StochasticDepth 10 | from torchvision.transforms._presets import ImageClassification, InterpolationMode 11 | from torchvision.utils import _log_api_usage_once 12 | from torchvision.models._api import WeightsEnum, Weights 13 | from torchvision.models._meta import _IMAGENET_CATEGORIES 14 | from torchvision.models._utils import _ovewrite_named_param 15 | 16 | 17 | __all__ = [ 18 | "SwinTransformer", 19 | "Swin_T_Weights", 20 | "Swin_S_Weights", 21 | "Swin_B_Weights", 22 | "swin_t", 23 | "swin_s", 24 | "swin_b", 25 | ] 26 | 27 | 28 | def _patch_merging_pad(x): 29 | H, W, _ = x.shape[-3:] 30 | x = F.pad(x, (0, 0, 0, W % 2, 0, H % 2)) 31 | return x 32 | 33 | 34 | torch.fx.wrap("_patch_merging_pad") 35 | 36 | 37 | class PatchMerging(nn.Module): 38 | """Patch Merging Layer. 39 | Args: 40 | dim (int): Number of input channels. 41 | norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm. 42 | """ 43 | 44 | def __init__(self, dim: int, norm_layer: Callable[..., nn.Module] = nn.LayerNorm): 45 | super().__init__() 46 | _log_api_usage_once(self) 47 | self.dim = dim 48 | self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False) 49 | self.norm = norm_layer(4 * dim) 50 | 51 | def forward(self, x: Tensor): 52 | """ 53 | Args: 54 | x (Tensor): input tensor with expected layout of [..., H, W, C] 55 | Returns: 56 | Tensor with layout of [..., H/2, W/2, 2*C] 57 | """ 58 | x = _patch_merging_pad(x) 59 | 60 | x0 = x[..., 0::2, 0::2, :] # ... H/2 W/2 C 61 | x1 = x[..., 1::2, 0::2, :] # ... H/2 W/2 C 62 | x2 = x[..., 0::2, 1::2, :] # ... H/2 W/2 C 63 | x3 = x[..., 1::2, 1::2, :] # ... H/2 W/2 C 64 | x = torch.cat([x0, x1, x2, x3], -1) # ... H/2 W/2 4*C 65 | 66 | x = self.norm(x) 67 | x = self.reduction(x) # ... 
H/2 W/2 2*C 68 | return x 69 | 70 | 71 | def shifted_window_attention( 72 | input: Tensor, 73 | qkv_weight: Tensor, 74 | proj_weight: Tensor, 75 | relative_position_bias: Tensor, 76 | window_size: List[int], 77 | num_heads: int, 78 | shift_size: List[int], 79 | attention_dropout: float = 0.0, 80 | dropout: float = 0.0, 81 | qkv_bias: Optional[Tensor] = None, 82 | proj_bias: Optional[Tensor] = None, 83 | ): 84 | """ 85 | Window based multi-head self attention (W-MSA) module with relative position bias. 86 | It supports both of shifted and non-shifted window. 87 | Args: 88 | input (Tensor[N, H, W, C]): The input tensor or 4-dimensions. 89 | qkv_weight (Tensor[in_dim, out_dim]): The weight tensor of query, key, value. 90 | proj_weight (Tensor[out_dim, out_dim]): The weight tensor of projection. 91 | relative_position_bias (Tensor): The learned relative position bias added to attention. 92 | window_size (List[int]): Window size. 93 | num_heads (int): Number of attention heads. 94 | shift_size (List[int]): Shift size for shifted window attention. 95 | attention_dropout (float): Dropout ratio of attention weight. Default: 0.0. 96 | dropout (float): Dropout ratio of output. Default: 0.0. 97 | qkv_bias (Tensor[out_dim], optional): The bias tensor of query, key, value. Default: None. 98 | proj_bias (Tensor[out_dim], optional): The bias tensor of projection. Default: None. 99 | Returns: 100 | Tensor[N, H, W, C]: The output tensor after shifted window attention. 101 | """ 102 | B, H, W, C = input.shape 103 | # pad feature maps to multiples of window size 104 | pad_r = (window_size[1] - W % window_size[1]) % window_size[1] 105 | pad_b = (window_size[0] - H % window_size[0]) % window_size[0] 106 | x = F.pad(input, (0, 0, 0, pad_r, 0, pad_b)) 107 | _, pad_H, pad_W, _ = x.shape 108 | 109 | # If window size is larger than feature size, there is no need to shift window 110 | if window_size[0] >= pad_H: 111 | shift_size[0] = 0 112 | if window_size[1] >= pad_W: 113 | shift_size[1] = 0 114 | 115 | # cyclic shift 116 | if sum(shift_size) > 0: 117 | x = torch.roll(x, shifts=(-shift_size[0], -shift_size[1]), dims=(1, 2)) 118 | 119 | # partition windows 120 | num_windows = (pad_H // window_size[0]) * (pad_W // window_size[1]) 121 | x = x.view(B, pad_H // window_size[0], window_size[0], pad_W // window_size[1], window_size[1], C) 122 | x = x.permute(0, 1, 3, 2, 4, 5).reshape(B * num_windows, window_size[0] * window_size[1], C) # B*nW, Ws*Ws, C 123 | 124 | # multi-head attention 125 | qkv = F.linear(x, qkv_weight, qkv_bias) 126 | qkv = qkv.reshape(x.size(0), x.size(1), 3, num_heads, C // num_heads).permute(2, 0, 3, 1, 4) 127 | q, k, v = qkv[0], qkv[1], qkv[2] 128 | q = q * (C // num_heads) ** -0.5 129 | attn = q.matmul(k.transpose(-2, -1)) 130 | # add relative position bias 131 | attn = attn + relative_position_bias 132 | 133 | if sum(shift_size) > 0: 134 | # generate attention mask 135 | attn_mask = x.new_zeros((pad_H, pad_W)) 136 | h_slices = ((0, -window_size[0]), (-window_size[0], -shift_size[0]), (-shift_size[0], None)) 137 | w_slices = ((0, -window_size[1]), (-window_size[1], -shift_size[1]), (-shift_size[1], None)) 138 | count = 0 139 | for h in h_slices: 140 | for w in w_slices: 141 | attn_mask[h[0] : h[1], w[0] : w[1]] = count 142 | count += 1 143 | attn_mask = attn_mask.view(pad_H // window_size[0], window_size[0], pad_W // window_size[1], window_size[1]) 144 | attn_mask = attn_mask.permute(0, 2, 1, 3).reshape(num_windows, window_size[0] * window_size[1]) 145 | attn_mask = attn_mask.unsqueeze(1) - 
attn_mask.unsqueeze(2) 146 | attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(attn_mask == 0, float(0.0)) 147 | attn = attn.view(x.size(0) // num_windows, num_windows, num_heads, x.size(1), x.size(1)) 148 | attn = attn + attn_mask.unsqueeze(1).unsqueeze(0) 149 | attn = attn.view(-1, num_heads, x.size(1), x.size(1)) 150 | 151 | attn = F.softmax(attn, dim=-1) 152 | attn = F.dropout(attn, p=attention_dropout) 153 | 154 | x = attn.matmul(v).transpose(1, 2).reshape(x.size(0), x.size(1), C) 155 | x = F.linear(x, proj_weight, proj_bias) 156 | x = F.dropout(x, p=dropout) 157 | 158 | # reverse windows 159 | x = x.view(B, pad_H // window_size[0], pad_W // window_size[1], window_size[0], window_size[1], C) 160 | x = x.permute(0, 1, 3, 2, 4, 5).reshape(B, pad_H, pad_W, C) 161 | 162 | # reverse cyclic shift 163 | if sum(shift_size) > 0: 164 | x = torch.roll(x, shifts=(shift_size[0], shift_size[1]), dims=(1, 2)) 165 | 166 | # unpad features 167 | x = x[:, :H, :W, :].contiguous() 168 | return x 169 | 170 | 171 | torch.fx.wrap("shifted_window_attention") 172 | 173 | 174 | class ShiftedWindowAttention(nn.Module): 175 | """ 176 | See :func:`shifted_window_attention`. 177 | """ 178 | 179 | def __init__( 180 | self, 181 | dim: int, 182 | window_size: List[int], 183 | shift_size: List[int], 184 | num_heads: int, 185 | qkv_bias: bool = True, 186 | proj_bias: bool = True, 187 | attention_dropout: float = 0.0, 188 | dropout: float = 0.0, 189 | ): 190 | super().__init__() 191 | if len(window_size) != 2 or len(shift_size) != 2: 192 | raise ValueError("window_size and shift_size must be of length 2") 193 | self.window_size = window_size 194 | self.shift_size = shift_size 195 | self.num_heads = num_heads 196 | self.attention_dropout = attention_dropout 197 | self.dropout = dropout 198 | 199 | self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) 200 | self.proj = nn.Linear(dim, dim, bias=proj_bias) 201 | 202 | # define a parameter table of relative position bias 203 | self.relative_position_bias_table = nn.Parameter( 204 | torch.zeros((2 * window_size[0] - 1) * (2 * window_size[1] - 1), num_heads) 205 | ) # 2*Wh-1 * 2*Ww-1, nH 206 | 207 | # get pair-wise relative position index for each token inside the window 208 | coords_h = torch.arange(self.window_size[0]) 209 | coords_w = torch.arange(self.window_size[1]) 210 | coords = torch.stack(torch.meshgrid(coords_h, coords_w, indexing="ij")) # 2, Wh, Ww 211 | coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww 212 | relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :] # 2, Wh*Ww, Wh*Ww 213 | relative_coords = relative_coords.permute(1, 2, 0).contiguous() # Wh*Ww, Wh*Ww, 2 214 | relative_coords[:, :, 0] += self.window_size[0] - 1 # shift to start from 0 215 | relative_coords[:, :, 1] += self.window_size[1] - 1 216 | relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1 217 | relative_position_index = relative_coords.sum(-1).view(-1) # Wh*Ww*Wh*Ww 218 | self.register_buffer("relative_position_index", relative_position_index) 219 | 220 | nn.init.trunc_normal_(self.relative_position_bias_table, std=0.02) 221 | 222 | def forward(self, x: Tensor): 223 | """ 224 | Args: 225 | x (Tensor): Tensor with layout of [B, H, W, C] 226 | Returns: 227 | Tensor with same layout as input, i.e. 
[B, H, W, C] 228 | """ 229 | 230 | N = self.window_size[0] * self.window_size[1] 231 | relative_position_bias = self.relative_position_bias_table[self.relative_position_index] # type: ignore[index] 232 | relative_position_bias = relative_position_bias.view(N, N, -1) 233 | relative_position_bias = relative_position_bias.permute(2, 0, 1).contiguous().unsqueeze(0) 234 | 235 | return shifted_window_attention( 236 | x, 237 | self.qkv.weight, 238 | self.proj.weight, 239 | relative_position_bias, 240 | self.window_size, 241 | self.num_heads, 242 | shift_size=self.shift_size, 243 | attention_dropout=self.attention_dropout, 244 | dropout=self.dropout, 245 | qkv_bias=self.qkv.bias, 246 | proj_bias=self.proj.bias, 247 | ) 248 | 249 | 250 | class SwinTransformerBlock(nn.Module): 251 | """ 252 | Swin Transformer Block. 253 | Args: 254 | dim (int): Number of input channels. 255 | num_heads (int): Number of attention heads. 256 | window_size (List[int]): Window size. 257 | shift_size (List[int]): Shift size for shifted window attention. 258 | mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.0. 259 | dropout (float): Dropout rate. Default: 0.0. 260 | attention_dropout (float): Attention dropout rate. Default: 0.0. 261 | stochastic_depth_prob: (float): Stochastic depth rate. Default: 0.0. 262 | norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm. 263 | attn_layer (nn.Module): Attention layer. Default: ShiftedWindowAttention 264 | """ 265 | 266 | def __init__( 267 | self, 268 | dim: int, 269 | num_heads: int, 270 | window_size: List[int], 271 | shift_size: List[int], 272 | mlp_ratio: float = 4.0, 273 | dropout: float = 0.0, 274 | attention_dropout: float = 0.0, 275 | stochastic_depth_prob: float = 0.0, 276 | norm_layer: Callable[..., nn.Module] = nn.LayerNorm, 277 | attn_layer: Callable[..., nn.Module] = ShiftedWindowAttention, 278 | ): 279 | super().__init__() 280 | _log_api_usage_once(self) 281 | 282 | self.norm1 = norm_layer(dim) 283 | self.attn = attn_layer( 284 | dim, 285 | window_size, 286 | shift_size, 287 | num_heads, 288 | attention_dropout=attention_dropout, 289 | dropout=dropout, 290 | ) 291 | self.stochastic_depth = StochasticDepth(stochastic_depth_prob, "row") 292 | self.norm2 = norm_layer(dim) 293 | self.mlp = MLP(dim, [int(dim * mlp_ratio), dim], activation_layer=nn.GELU, inplace=None, dropout=dropout) 294 | 295 | for m in self.mlp.modules(): 296 | if isinstance(m, nn.Linear): 297 | nn.init.xavier_uniform_(m.weight) 298 | if m.bias is not None: 299 | nn.init.normal_(m.bias, std=1e-6) 300 | 301 | def forward(self, x: Tensor): 302 | x = x + self.stochastic_depth(self.attn(self.norm1(x))) 303 | x = x + self.stochastic_depth(self.mlp(self.norm2(x))) 304 | return x 305 | 306 | 307 | class SwinTransformer(nn.Module): 308 | """ 309 | Implements Swin Transformer from the `"Swin Transformer: Hierarchical Vision Transformer using 310 | Shifted Windows" `_ paper. 311 | Args: 312 | patch_size (List[int]): Patch size. 313 | embed_dim (int): Patch embedding dimension. 314 | depths (List(int)): Depth of each Swin Transformer layer. 315 | num_heads (List(int)): Number of attention heads in different layers. 316 | window_size (List[int]): Window size. 317 | mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.0. 318 | dropout (float): Dropout rate. Default: 0.0. 319 | attention_dropout (float): Attention dropout rate. Default: 0.0. 320 | stochastic_depth_prob (float): Stochastic depth rate. Default: 0.0. 
321 | num_classes (int): Number of classes for classification head. Default: 1000. 322 | block (nn.Module, optional): SwinTransformer Block. Default: None. 323 | norm_layer (nn.Module, optional): Normalization layer. Default: None. 324 | """ 325 | 326 | def __init__( 327 | self, 328 | patch_size: List[int], 329 | embed_dim: int, 330 | depths: List[int], 331 | num_heads: List[int], 332 | window_size: List[int], 333 | mlp_ratio: float = 4.0, 334 | dropout: float = 0.0, 335 | attention_dropout: float = 0.0, 336 | stochastic_depth_prob: float = 0.0, 337 | num_classes: int = 1000, 338 | norm_layer: Optional[Callable[..., nn.Module]] = None, 339 | block: Optional[Callable[..., nn.Module]] = None, 340 | in_channel: int = 32, 341 | ): 342 | super().__init__() 343 | _log_api_usage_once(self) 344 | self.num_classes = num_classes 345 | 346 | if block is None: 347 | block = SwinTransformerBlock 348 | 349 | if norm_layer is None: 350 | norm_layer = partial(nn.LayerNorm, eps=1e-5) 351 | 352 | layers: List[nn.Module] = [] 353 | # split image into non-overlapping patches 354 | layers.append( 355 | nn.Sequential( 356 | nn.Conv2d( 357 | in_channel, embed_dim, kernel_size=(patch_size[0], patch_size[1]), stride=(patch_size[0], patch_size[1]) 358 | ), 359 | Permute([0, 2, 3, 1]), 360 | norm_layer(embed_dim), 361 | ) 362 | ) 363 | 364 | total_stage_blocks = sum(depths) 365 | stage_block_id = 0 366 | # build SwinTransformer blocks 367 | for i_stage in range(len(depths)): 368 | stage: List[nn.Module] = [] 369 | dim = embed_dim * 2 ** i_stage 370 | for i_layer in range(depths[i_stage]): 371 | # adjust stochastic depth probability based on the depth of the stage block 372 | sd_prob = stochastic_depth_prob * float(stage_block_id) / (total_stage_blocks - 1) 373 | stage.append( 374 | block( 375 | dim, 376 | num_heads[i_stage], 377 | window_size=window_size, 378 | shift_size=[0 if i_layer % 2 == 0 else w // 2 for w in window_size], 379 | mlp_ratio=mlp_ratio, 380 | dropout=dropout, 381 | attention_dropout=attention_dropout, 382 | stochastic_depth_prob=sd_prob, 383 | norm_layer=norm_layer, 384 | ) 385 | ) 386 | stage_block_id += 1 387 | layers.append(nn.Sequential(*stage)) 388 | # add patch merging layer 389 | if i_stage < (len(depths) - 1): 390 | layers.append(PatchMerging(dim, norm_layer)) 391 | self.features = nn.Sequential(*layers) 392 | 393 | # num_features = embed_dim * 2 ** (len(depths) - 1) 394 | # self.norm = norm_layer(num_features) 395 | # self.avgpool = nn.AdaptiveAvgPool2d(1) 396 | # self.head = nn.Linear(num_features, num_classes) 397 | 398 | for m in self.modules(): 399 | if isinstance(m, nn.Linear): 400 | nn.init.trunc_normal_(m.weight, std=0.02) 401 | if m.bias is not None: 402 | nn.init.zeros_(m.bias) 403 | 404 | def forward(self, x): 405 | out = [] 406 | for i, blk in enumerate(self.features): 407 | x = blk(x) 408 | if i in {1, 3}: 409 | out.append(x.permute(0, 3, 1, 2)) 410 | # x = self.norm(x) 411 | x = x.permute(0, 3, 1, 2) 412 | # x = self.avgpool(x) 413 | # x = torch.flatten(x, 1) 414 | # x = self.head(x) 415 | return x, out 416 | 417 | 418 | def _swin_transformer( 419 | patch_size: List[int], 420 | embed_dim: int, 421 | depths: List[int], 422 | num_heads: List[int], 423 | window_size: List[int], 424 | stochastic_depth_prob: float, 425 | weights: Optional[WeightsEnum], 426 | progress: bool, 427 | **kwargs: Any, 428 | ) -> SwinTransformer: 429 | if weights is not None: 430 | _ovewrite_named_param(kwargs, "num_classes", len(weights.meta["categories"])) 431 | 432 | model = SwinTransformer( 433 | 
patch_size=patch_size, 434 | embed_dim=embed_dim, 435 | depths=depths, 436 | num_heads=num_heads, 437 | window_size=window_size, 438 | stochastic_depth_prob=stochastic_depth_prob, 439 | **kwargs, 440 | ) 441 | 442 | if weights is not None: 443 | state_dict = weights.get_state_dict(progress=progress) 444 | model_dict = model.state_dict() 445 | for k, v in state_dict.items(): 446 | if k == 'features.0.0.weight' and kwargs['in_channel'] != 3: 447 | print(f"in_channels = {kwargs['in_channel']}, jumping features.0.0.weight!") 448 | continue 449 | 450 | model.load_state_dict(model_dict, strict=False) 451 | 452 | return model 453 | 454 | 455 | _COMMON_META = { 456 | "categories": _IMAGENET_CATEGORIES, 457 | } 458 | 459 | 460 | class Swin_T_Weights(WeightsEnum): 461 | IMAGENET1K_V1 = Weights( 462 | url="https://download.pytorch.org/models/swin_t-704ceda3.pth", 463 | transforms=partial( 464 | ImageClassification, crop_size=224, resize_size=232, interpolation=InterpolationMode.BICUBIC 465 | ), 466 | meta={ 467 | **_COMMON_META, 468 | "num_params": 28288354, 469 | "min_size": (224, 224), 470 | "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#swintransformer", 471 | "_metrics": { 472 | "ImageNet-1K": { 473 | "acc@1": 81.474, 474 | "acc@5": 95.776, 475 | } 476 | }, 477 | "_docs": """These weights reproduce closely the results of the paper using a similar training recipe.""", 478 | }, 479 | ) 480 | DEFAULT = IMAGENET1K_V1 481 | 482 | 483 | class Swin_S_Weights(WeightsEnum): 484 | IMAGENET1K_V1 = Weights( 485 | url="https://download.pytorch.org/models/swin_s-5e29d889.pth", 486 | transforms=partial( 487 | ImageClassification, crop_size=224, resize_size=246, interpolation=InterpolationMode.BICUBIC 488 | ), 489 | meta={ 490 | **_COMMON_META, 491 | "num_params": 49606258, 492 | "min_size": (224, 224), 493 | "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#swintransformer", 494 | "_metrics": { 495 | "ImageNet-1K": { 496 | "acc@1": 83.196, 497 | "acc@5": 96.360, 498 | } 499 | }, 500 | "_docs": """These weights reproduce closely the results of the paper using a similar training recipe.""", 501 | }, 502 | ) 503 | DEFAULT = IMAGENET1K_V1 504 | 505 | 506 | class Swin_B_Weights(WeightsEnum): 507 | IMAGENET1K_V1 = Weights( 508 | url="https://download.pytorch.org/models/swin_b-68c6b09e.pth", 509 | transforms=partial( 510 | ImageClassification, crop_size=224, resize_size=238, interpolation=InterpolationMode.BICUBIC 511 | ), 512 | meta={ 513 | **_COMMON_META, 514 | "num_params": 87768224, 515 | "min_size": (224, 224), 516 | "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#swintransformer", 517 | "_metrics": { 518 | "ImageNet-1K": { 519 | "acc@1": 83.582, 520 | "acc@5": 96.640, 521 | } 522 | }, 523 | "_docs": """These weights reproduce closely the results of the paper using a similar training recipe.""", 524 | }, 525 | ) 526 | DEFAULT = IMAGENET1K_V1 527 | 528 | 529 | def swin_t(*, weights: Optional[Swin_T_Weights] = None, progress: bool = True, **kwargs: Any) -> SwinTransformer: 530 | """ 531 | Constructs a swin_tiny architecture from 532 | `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows `_. 533 | 534 | Args: 535 | weights (:class:`~torchvision.models.Swin_T_Weights`, optional): The 536 | pretrained weights to use. See 537 | :class:`~torchvision.models.Swin_T_Weights` below for 538 | more details, and possible values. By default, no pre-trained 539 | weights are used. 
540 | progress (bool, optional): If True, displays a progress bar of the 541 | download to stderr. Default is True. 542 | **kwargs: parameters passed to the ``torchvision.models.swin_transformer.SwinTransformer`` 543 | base class. Please refer to the `source code 544 | `_ 545 | for more details about this class. 546 | 547 | .. autoclass:: torchvision.models.Swin_T_Weights 548 | :members: 549 | """ 550 | weights = Swin_T_Weights.verify(weights) 551 | 552 | return _swin_transformer( 553 | patch_size=[4, 4], 554 | embed_dim=96, 555 | depths=[2, 2, 6], 556 | num_heads=[3, 6, 12], 557 | window_size=[7, 7], 558 | stochastic_depth_prob=0.2, 559 | weights=weights, 560 | progress=progress, 561 | **kwargs, 562 | ) 563 | 564 | 565 | def swin_s(*, weights: Optional[Swin_S_Weights] = None, progress: bool = True, **kwargs: Any) -> SwinTransformer: 566 | """ 567 | Constructs a swin_small architecture from 568 | `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows `_. 569 | 570 | Args: 571 | weights (:class:`~torchvision.models.Swin_S_Weights`, optional): The 572 | pretrained weights to use. See 573 | :class:`~torchvision.models.Swin_S_Weights` below for 574 | more details, and possible values. By default, no pre-trained 575 | weights are used. 576 | progress (bool, optional): If True, displays a progress bar of the 577 | download to stderr. Default is True. 578 | **kwargs: parameters passed to the ``torchvision.models.swin_transformer.SwinTransformer`` 579 | base class. Please refer to the `source code 580 | `_ 581 | for more details about this class. 582 | 583 | .. autoclass:: torchvision.models.Swin_S_Weights 584 | :members: 585 | """ 586 | weights = Swin_S_Weights.verify(weights) 587 | 588 | return _swin_transformer( 589 | patch_size=[4, 4], 590 | embed_dim=96, 591 | depths=[2, 2, 18], 592 | num_heads=[3, 6, 12], 593 | window_size=[7, 7], 594 | stochastic_depth_prob=0.3, 595 | weights=weights, 596 | progress=progress, 597 | **kwargs, 598 | ) 599 | 600 | 601 | def swin_b(*, weights: Optional[Swin_B_Weights] = None, progress: bool = True, **kwargs: Any) -> SwinTransformer: 602 | """ 603 | Constructs a swin_base architecture from 604 | `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows `_. 605 | 606 | Args: 607 | weights (:class:`~torchvision.models.Swin_B_Weights`, optional): The 608 | pretrained weights to use. See 609 | :class:`~torchvision.models.Swin_B_Weights` below for 610 | more details, and possible values. By default, no pre-trained 611 | weights are used. 612 | progress (bool, optional): If True, displays a progress bar of the 613 | download to stderr. Default is True. 614 | **kwargs: parameters passed to the ``torchvision.models.swin_transformer.SwinTransformer`` 615 | base class. Please refer to the `source code 616 | `_ 617 | for more details about this class. 618 | 619 | .. 
autoclass:: torchvision.models.Swin_B_Weights 620 | :members: 621 | """ 622 | weights = Swin_B_Weights.verify(weights) 623 | 624 | return _swin_transformer( 625 | patch_size=[4, 4], 626 | embed_dim=128, 627 | depths=[2, 2, 18], 628 | num_heads=[4, 8, 16], 629 | window_size=[7, 7], 630 | stochastic_depth_prob=0.5, 631 | weights=weights, 632 | progress=progress, 633 | **kwargs, 634 | ) 635 | -------------------------------------------------------------------------------- /models/clustering/pair_wise_distance.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.utils.cpp_extension import load_inline 3 | from .pair_wise_distance_cuda_source import source 4 | 5 | print("compiling cuda source of 'pair_wise_distance' function...") 6 | pair_wise_distance_cuda = load_inline( 7 | "pair_wise_distance", cpp_sources="", cuda_sources=source 8 | ) 9 | print("compiling done") 10 | 11 | 12 | class PairwiseDistFunction(torch.autograd.Function): 13 | @staticmethod 14 | def forward(self, pixel_features, deep_feature, sdf_feature, spixel_features, init_spixel_indices, num_spixels_width, num_spixels_height): 15 | self.num_spixels_width = num_spixels_width 16 | self.num_spixels_height = num_spixels_height 17 | output = pixel_features.new(pixel_features.shape[0], 9, pixel_features.shape[-1]).zero_() 18 | self.save_for_backward(pixel_features, deep_feature, sdf_feature, spixel_features, init_spixel_indices) 19 | 20 | return pair_wise_distance_cuda.forward( 21 | pixel_features.contiguous(), deep_feature.contiguous(), sdf_feature.contiguous(), spixel_features.contiguous(), 22 | init_spixel_indices.contiguous(), output, 23 | self.num_spixels_width, self.num_spixels_height 24 | ) 25 | 26 | @staticmethod 27 | def backward(self, dist_matrix_grad): 28 | pixel_features, deep_feature, sdf_feature, spixel_features, init_spixel_indices = self.saved_tensors 29 | 30 | pixel_features_grad = torch.zeros_like(pixel_features) 31 | spixel_features_grad = torch.zeros_like(spixel_features) 32 | 33 | pixel_features_grad, spixel_features_grad = pair_wise_distance_cuda.backward( 34 | dist_matrix_grad.contiguous(), pixel_features.contiguous(), 35 | deep_feature.contiguous(), sdf_feature.contiguous(), 36 | spixel_features.contiguous(), init_spixel_indices.contiguous(), 37 | pixel_features_grad, spixel_features_grad, 38 | self.num_spixels_width, self.num_spixels_height 39 | ) 40 | return pixel_features_grad, None, None, spixel_features_grad, None, None, None 41 | 42 | -------------------------------------------------------------------------------- /models/clustering/pair_wise_distance_cuda_source.py: -------------------------------------------------------------------------------- 1 | source = ''' 2 | #include 3 | #include 4 | #include 5 | #include 6 | 7 | #define CUDA_NUM_THREADS 256 8 | 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | 15 | template 16 | __global__ void forward_kernel( 17 | const scalar_t* __restrict__ pixel_features, 18 | const scalar_t* __restrict__ deep_features, 19 | const scalar_t* __restrict__ sdf_feature, 20 | const scalar_t* __restrict__ spixel_features, 21 | const scalar_t* __restrict__ spixel_indices, 22 | scalar_t* __restrict__ dist_matrix, 23 | int batchsize, int channels, int num_pixels, int num_spixels, 24 | int num_spixels_w, int num_spixels_h 25 | ){ 26 | int index = blockIdx.x * blockDim.x + threadIdx.x; 27 | if (index >= batchsize * num_pixels * 9) return; 28 | 29 | int cp = channels * num_pixels; 30 | int cs = 
channels * num_spixels; 31 | 32 | int b = index % batchsize; 33 | int spixel_offset = (index / batchsize) % 9; 34 | int p = (index / (batchsize * 9)) % num_pixels; 35 | 36 | int init_spix_index = spixel_indices[b * num_pixels + p]; 37 | 38 | int x_index = init_spix_index % num_spixels_w; 39 | int spixel_offset_x = (spixel_offset % 3 - 1); 40 | 41 | int y_index = init_spix_index / num_spixels_w; 42 | int spixel_offset_y = (spixel_offset / 3 - 1); 43 | 44 | if (x_index + spixel_offset_x < 0 || x_index + spixel_offset_x >= num_spixels_w) { 45 | dist_matrix[b * (9 * num_pixels) + spixel_offset * num_pixels + p] = 1e16; 46 | } 47 | else if (y_index + spixel_offset_y < 0 || y_index + spixel_offset_y >= num_spixels_h) { 48 | dist_matrix[b * (9 * num_pixels) + spixel_offset * num_pixels + p] = 1e16; 49 | } 50 | else { 51 | int query_spixel_index = init_spix_index + spixel_offset_x + num_spixels_w * spixel_offset_y; 52 | 53 | scalar_t sum_squared_diff = 0; 54 | for (int c=0; c<<< block, CUDA_NUM_THREADS >>>( 86 | pixel_features.data(), 87 | deep_features.data(), 88 | sdf_feature.data(), 89 | spixel_features.data(), 90 | spixel_indices.data(), 91 | dist_matrix.data(), 92 | batchsize, channels, num_pixels, 93 | num_spixels, num_spixels_w, num_spixels_h 94 | ); 95 | })); 96 | 97 | return dist_matrix; 98 | } 99 | 100 | template 101 | __global__ void backward_kernel( 102 | const scalar_t* __restrict__ dist_matrix_grad, 103 | const scalar_t* __restrict__ deep_features, 104 | const scalar_t* __restrict__ sdf_feature, 105 | const scalar_t* __restrict__ pixel_features, 106 | const scalar_t* __restrict__ spixel_features, 107 | const scalar_t* __restrict__ spixel_indices, 108 | scalar_t* __restrict__ pixel_feature_grad, 109 | scalar_t* __restrict__ spixel_feature_grad, 110 | int batchsize, int channels, int num_pixels, int num_spixels, 111 | int num_spixels_w, int num_spixels_h 112 | ){ 113 | int index = blockIdx.x * blockDim.x + threadIdx.x; 114 | if (index >= batchsize * num_pixels * 9) return; 115 | 116 | int cp = channels * num_pixels; 117 | int cs = channels * num_spixels; 118 | 119 | int b = index % batchsize; 120 | int spixel_offset = (index / batchsize) % 9; 121 | int p = (index / (batchsize * 9)) % num_pixels; 122 | 123 | int init_spix_index = spixel_indices[b * num_pixels + p]; 124 | 125 | int x_index = init_spix_index % num_spixels_w; 126 | int spixel_offset_x = (spixel_offset % 3 - 1); 127 | 128 | int y_index = init_spix_index / num_spixels_w; 129 | int spixel_offset_y = (spixel_offset / 3 - 1); 130 | 131 | if (x_index + spixel_offset_x < 0 || x_index + spixel_offset_x >= num_spixels_w) return; 132 | else if (y_index + spixel_offset_y < 0 || y_index + spixel_offset_y >= num_spixels_h) return; 133 | else { 134 | int query_spixel_index = init_spix_index + spixel_offset_x + num_spixels_w * spixel_offset_y; 135 | 136 | scalar_t dist_matrix_grad_val = dist_matrix_grad[b * (9 * num_pixels) + spixel_offset * num_pixels + p]; 137 | 138 | for (int c=0; c backward_cuda( 155 | const torch::Tensor dist_matrix_grad, 156 | const torch::Tensor deep_features, 157 | const torch::Tensor sdf_feature, 158 | const torch::Tensor pixel_features, 159 | const torch::Tensor spixel_features, 160 | const torch::Tensor spixel_indices, 161 | torch::Tensor pixel_features_grad, 162 | torch::Tensor spixel_features_grad, 163 | int num_spixels_w, int num_spixels_h 164 | ){ 165 | int batchsize = pixel_features.size(0); 166 | int channels = pixel_features.size(1); 167 | int num_pixels = pixel_features.size(2); 168 | int num_spixels = 
spixel_features.size(2); 169 | 170 | 171 | dim3 block((batchsize * 9 * num_pixels + CUDA_NUM_THREADS - 1) / CUDA_NUM_THREADS); 172 | 173 | AT_DISPATCH_FLOATING_TYPES(pixel_features_grad.type(), "backward_kernel", ([&] { 174 | backward_kernel<<< block, CUDA_NUM_THREADS >>>( 175 | dist_matrix_grad.data(), 176 | deep_features.data(), 177 | sdf_feature.data(), 178 | pixel_features.data(), 179 | spixel_features.data(), 180 | spixel_indices.data(), 181 | pixel_features_grad.data(), 182 | spixel_features_grad.data(), 183 | batchsize, channels, num_pixels, 184 | num_spixels, num_spixels_w, num_spixels_h 185 | ); 186 | })); 187 | 188 | return {pixel_features_grad, spixel_features_grad}; 189 | } 190 | 191 | PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { 192 | m.def("forward", &forward_cuda, "pair_wise_distance forward"); 193 | m.def("backward", &backward_cuda, "pair_wise_distance backward"); 194 | } 195 | 196 | ''' 197 | -------------------------------------------------------------------------------- /models/modeling.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | from models.spectrum_pixel_clustering import Cluster 4 | from models.backbones.resnet import resnet18 5 | from models.backbones.swin import swin_t, Swin_T_Weights 6 | from models.backbones.PVTV2 import pvt_v2_b1 7 | import torch.nn.functional as F 8 | from models.VIT import ViT 9 | 10 | 11 | class spectral_derivate: 12 | def __init__(self): 13 | super().__init__() 14 | self.wavelength = torch.tensor([466, 480, 500, 520, 536, 550, 566, 580, 15 | 596, 610, 626, 640, 656, 670, 686, 700, 16 | 716, 730, 746, 760, 776, 790, 806, 820, 17 | 836, 850, 866, 880, 896, 910, 926, 940]) 18 | self.delta_n = self.wavelength[1:] - self.wavelength[:-1] 19 | 20 | def sdf(self, x): 21 | derivate = x[:, 1:, :, :] - x[:, :-1, :, :] 22 | if x.shape[1] == 32: 23 | delta_n = self.delta_n.to(x.device) 24 | return derivate / delta_n.view(1, -1, 1, 1) 25 | else: 26 | return derivate 27 | 28 | 29 | class Encoder(nn.Module): 30 | def __init__(self, backbone, in_channels, out_channels): 31 | super().__init__() 32 | if backbone == 'resnet18': 33 | self.encoder = resnet18(pretrained=True, in_channels=in_channels) 34 | channels = [64, 64, 128] 35 | elif backbone == 'pvtv2_b1': 36 | self.encoder = pvt_v2_b1( 37 | pre_trained_path="models/pre_trained/pvt_v2_b1.pth", 38 | in_chans=in_channels) 39 | channels = [64, 128, 320] 40 | elif backbone == 'swin_tiny': 41 | self.encoder = swin_t(weights=Swin_T_Weights.IMAGENET1K_V1, progress=True, in_channel=in_channels) 42 | channels = [96, 192, 384] 43 | self.up_sample = nn.ModuleDict({ 44 | 'up1': nn.Sequential( 45 | nn.Conv2d(channels[2], channels[1], kernel_size=1), 46 | nn.BatchNorm2d(channels[1]), 47 | nn.ReLU(inplace=True) 48 | ), 49 | 'up2': nn.Sequential( 50 | nn.Conv2d(channels[1] + channels[1], channels[0], kernel_size=1), 51 | nn.BatchNorm2d(channels[0]), 52 | nn.ReLU(inplace=True) 53 | ), 54 | 'up3': nn.Sequential( 55 | nn.Conv2d(channels[0] + channels[0], out_channels, kernel_size=1), 56 | nn.BatchNorm2d(out_channels), 57 | nn.ReLU(inplace=True) 58 | ) 59 | }) 60 | 61 | def forward(self, x): 62 | img_feat, res_feats = self.encoder(x) 63 | img_feat = F.interpolate(self.up_sample['up1'](img_feat), size=res_feats[1].shape[-2:], mode='nearest') 64 | img_feat = F.interpolate(self.up_sample['up2'](torch.cat([img_feat, res_feats[1]], dim=1)), 65 | size=res_feats[0].shape[-2:], mode='nearest') 66 | img_feat = 
F.interpolate(self.up_sample['up3'](torch.cat([img_feat, res_feats[0]], dim=1)), size=x.shape[-2:], 67 | mode='nearest') 68 | return img_feat 69 | 70 | 71 | class DSTC(nn.Module): 72 | def __init__(self, cfg): 73 | super().__init__() 74 | in_channels = cfg['in_channels'] 75 | classes = cfg['num_classes'] 76 | clus_dim = cfg['cluster']['dim'] 77 | self.cfg = cfg 78 | # UNet 79 | self.UNet = Encoder(backbone=cfg['backbone'], in_channels=in_channels, out_channels=clus_dim) 80 | # SDF operation 81 | self.sdf_op = spectral_derivate() 82 | # 83 | self.cluster = Cluster(in_channels, classes, 84 | proposal=cfg['cluster']['proposal'], fold=cfg['cluster']['fold'], hidden_dim=clus_dim) 85 | self.vit = ViT(image_size=cfg['img_size'], patch_size=cfg['vit']['patch_size'], num_classes=classes, 86 | dim=cfg['vit']['hidden_size'], depth=cfg['vit']['depth'], heads=cfg['vit']['num_heads'], 87 | mlp_dim=cfg['vit']['mlp_dim'], channels=clus_dim, dim_head=cfg['vit']['dim_head'], 88 | dropout=cfg['vit']['dropout'], emb_dropout=cfg['vit']['attention_dropout']) 89 | 90 | def forward(self, img, gt): 91 | """ 92 | img: [B, D, H, W] 93 | gt: [B, H, W] 94 | """ 95 | # UNet 96 | img_feat = self.UNet(img) 97 | # sdf 98 | sdf_data = self.sdf_op.sdf(img) # [B, D-1, H, W] 99 | # ----cluster---- 100 | center_feat, labels, spix_map = self.cluster(img, gt, img_feat, sdf_data) 101 | # ----vit classification---- 102 | vit_out = self.vit(center_feat) 103 | return vit_out, labels, spix_map 104 | -------------------------------------------------------------------------------- /models/pre_trained/pvt_v2_b1.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laprf/DSTC/765951e4952c129e914492220a79cb23d5a2a7b4/models/pre_trained/pvt_v2_b1.pth -------------------------------------------------------------------------------- /models/spectrum_pixel_clustering.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | from einops import rearrange 4 | from models.clustering.pair_wise_distance import PairwiseDistFunction 5 | import torch.nn.functional as F 6 | 7 | 8 | @torch.no_grad() 9 | def get_abs_indices(init_label_map, num_spixels_width): 10 | """Get absolute indices for the initial label map.""" 11 | b, n_pixel = init_label_map.shape 12 | device = init_label_map.device 13 | r = torch.arange(-1, 2.0, device=device) 14 | relative_spix_indices = torch.cat([r - num_spixels_width, r, r + num_spixels_width], 0) 15 | 16 | abs_pix_indices = torch.arange(n_pixel, device=device)[None, None].repeat(b, 9, 1).reshape(-1).long() 17 | abs_spix_indices = (init_label_map[:, None] + relative_spix_indices[None, :, None]).reshape(-1).long() 18 | abs_batch_indices = torch.arange(b, device=device)[:, None, None].repeat(1, 9, n_pixel).reshape(-1).long() 19 | 20 | return torch.stack([abs_batch_indices, abs_spix_indices, abs_pix_indices], 0) 21 | 22 | 23 | def calc_init_centroid(images, num_spixels_width, num_spixels_height): 24 | """Calculate initial superpixels and return centroids and initial label map.""" 25 | batchsize, channels, height, width = images.shape 26 | device = images.device 27 | 28 | centroids = F.adaptive_avg_pool2d(images, (num_spixels_height, num_spixels_width)) 29 | 30 | with torch.no_grad(): 31 | num_spixels = num_spixels_width * num_spixels_height 32 | labels = torch.arange(num_spixels, device=device).reshape(1, 1, *centroids.shape[-2:]).type_as(centroids) 33 | init_label_map = 
F.interpolate(labels, size=(height, width), mode="nearest") 34 | init_label_map = init_label_map.repeat(batchsize, 1, 1, 1) 35 | 36 | init_label_map = init_label_map.reshape(batchsize, -1) 37 | centroids = centroids.reshape(batchsize, channels, -1) 38 | 39 | return centroids, init_label_map 40 | 41 | 42 | def ssn_iter(pixel_features, deep_feature, sdf_feature, proposals=None, n_iter=2): 43 | if proposals is None: 44 | proposals = [2, 2] 45 | num_spixels_height, num_spixels_width = proposals 46 | num_spixels = num_spixels_height * num_spixels_width 47 | 48 | spixel_features, init_label_map = calc_init_centroid(pixel_features, num_spixels_width, num_spixels_height) 49 | abs_indices = get_abs_indices(init_label_map, num_spixels_width) 50 | 51 | pixel_features = pixel_features.reshape(*pixel_features.shape[:2], -1) 52 | permuted_pixel_features = pixel_features.permute(0, 2, 1).contiguous() 53 | 54 | deep_feature = deep_feature.reshape(*deep_feature.shape[:2], -1) 55 | sdf_feature = sdf_feature.reshape(*sdf_feature.shape[:2], -1) 56 | 57 | with torch.no_grad(): 58 | for k in range(n_iter): 59 | if k < n_iter - 1: 60 | dist_matrix = PairwiseDistFunction.apply( 61 | pixel_features, deep_feature, sdf_feature, spixel_features, init_label_map, 62 | num_spixels_width, num_spixels_height 63 | ) 64 | 65 | affinity_matrix = (-dist_matrix).softmax(1) 66 | reshaped_affinity_matrix = affinity_matrix.reshape(-1) 67 | 68 | mask = (abs_indices[1] >= 0) * (abs_indices[1] < num_spixels) 69 | sparse_abs_affinity = torch.sparse_coo_tensor(abs_indices[:, mask], reshaped_affinity_matrix[mask]) 70 | 71 | abs_affinity = sparse_abs_affinity.to_dense().contiguous() 72 | spixel_features = torch.bmm(abs_affinity, permuted_pixel_features) \ 73 | / (abs_affinity.sum(2, keepdim=True) + 1e-16) 74 | 75 | spixel_features = spixel_features.permute(0, 2, 1).contiguous() 76 | else: 77 | dist_matrix = PairwiseDistFunction.apply( 78 | pixel_features, deep_feature, sdf_feature, spixel_features, init_label_map, 79 | num_spixels_width, num_spixels_height 80 | ) 81 | 82 | affinity_matrix = (-dist_matrix).softmax(1) 83 | reshaped_affinity_matrix = affinity_matrix.reshape(-1) 84 | 85 | mask = (abs_indices[1] >= 0) * (abs_indices[1] < num_spixels) 86 | sparse_abs_affinity = torch.sparse_coo_tensor(abs_indices[:, mask], reshaped_affinity_matrix[mask]) 87 | 88 | abs_affinity = sparse_abs_affinity.to_dense().contiguous() 89 | 90 | return abs_affinity, num_spixels 91 | 92 | 93 | class Cluster(nn.Module): 94 | def __init__(self, dim, num_classes, proposal=2, hidden_dim=96, fold=8): 95 | super().__init__() 96 | self.proposal = proposal 97 | self.fold = fold 98 | self.classes = num_classes 99 | 100 | self.f = nn.Conv2d(dim, hidden_dim, kernel_size=1) 101 | self.f_sdf1 = nn.Conv2d(dim - 1, hidden_dim, kernel_size=1) 102 | 103 | self.centers_proposal = nn.AdaptiveAvgPool2d((proposal, proposal)) 104 | 105 | self.init_weights() 106 | 107 | def cluster_forward(self, x, feat, sdf_data): 108 | W, H = x.shape[-2:] 109 | 110 | hsi_in = self.f(x) 111 | value = feat 112 | 113 | b0, c0, w0, h0 = hsi_in.shape 114 | assert w0 % self.fold == 0 and h0 % self.fold == 0, \ 115 | f"Ensure the feature map size ({w0}*{h0}) can be divided by fold {self.fold}*{self.fold}" 116 | hsi_in = rearrange(hsi_in, "b c (f1 w) (f2 h) -> (b f1 f2) c w h", f1=self.fold, f2=self.fold) 117 | value = rearrange(value, "b c (f1 w) (f2 h) -> (b f1 f2) c w h", f1=self.fold, f2=self.fold) 118 | sdf_data = self.f_sdf1(sdf_data) 119 | sdf_data = rearrange(sdf_data, "b c (f1 w) (f2 h) -> (b 
f1 f2) c w h", f1=self.fold, f2=self.fold) 120 | 121 | value_centers = self.centers_proposal(value) 122 | 123 | sim, _ = ssn_iter(value, hsi_in, sdf_data, 124 | proposals=[self.proposal, self.proposal], n_iter=2) 125 | # [B,M,N] 每一个中心点与每一个像素点的相似度,M为中心点个数,N为像素点个数 126 | 127 | # we use mask to sololy assign each point to one center 128 | _, sim_max_idx = sim.max(dim=1, keepdim=True) # sim_max_idx: [B,1,N] 找到每一个像素点对应的最大相似度的中心点的索引 129 | mask = torch.zeros_like(sim) # binary #[B,M,N] 130 | mask.scatter_(1, sim_max_idx, 1.) # binary [B,M,N] 在mask上将每一个像素点对应的最大相似度的中心点的索引处置为1 131 | sim = sim * mask 132 | value2 = rearrange(value, 'b c w h -> b (w h) c') # [B,N,C] 133 | 134 | # aggregate step, out shape [B,M,C] 135 | value_centers = rearrange(value_centers, 'b c w h -> b (w h) c') # [B,C_W*C_H,C] 136 | out = ((value2.unsqueeze(dim=1) * sim.unsqueeze(dim=-1)).sum(dim=2) + value_centers) / ( 137 | sim.sum(dim=-1, keepdim=True) + 1.0) # [B,M,C] 138 | 139 | out = rearrange(out, "(b f1 f2) (p1 p2) c -> b (f1 f2 p1 p2) c", f1=self.fold, f2=self.fold, p1=self.proposal, 140 | p2=self.proposal) 141 | mask = rearrange(mask, "(b f1 f2) m (w h) -> b (f1 f2) m w h", f1=self.fold, f2=self.fold, w=W // self.fold, 142 | h=H // self.fold) 143 | return mask, out 144 | 145 | def gen_labels(self, gt, mask): 146 | B, W, H = gt.shape 147 | f, p = self.fold, self.proposal 148 | 149 | # ------- gen spix_map ------ 150 | coef = torch.arange(0, p * p, dtype=mask.dtype, device=mask.device).view(1, 1, p * p, 1, 1) 151 | small_imgs = (mask * coef).sum(dim=2) 152 | bias = torch.arange(0, f * f * p * p, step=p * p, dtype=mask.dtype, device=mask.device).view(1, f * f, 1, 1) 153 | small_imgs = small_imgs + bias 154 | spix_map = rearrange(small_imgs, "b (f1 f2) w h -> b (f1 w) (f2 h)", f1=f, f2=f) 155 | 156 | # ------- gen labels ------ 157 | gt_reshape = rearrange(gt, "b (f1 w) (f2 h) -> b (f1 f2) w h", f1=f, f2=f) + 1 158 | # 将gt_reshape与mask相乘, 159 | gt_filt = gt_reshape.unsqueeze(2) * mask # Shape: [b, f*f, p*p, w, h] 160 | # 将gt_filt展平 161 | gt_filt = gt_filt.flatten(start_dim=3).long() # Shape: [b, f*f, p*p, w*h] 162 | 163 | # 计算gt_filt沿最后一个维度上,每一个数值的个数 164 | count = torch.zeros((B, f * f, p * p, self.classes + 1), dtype=torch.long, 165 | device=gt.device) # Shape: [b, f*f, p*p, classes + 1] 166 | count.scatter_add_(dim=3, index=gt_filt, src=torch.ones_like(gt_filt)) # Shape: [b, f*f, p*p, classes + 1] 167 | labels = rearrange(count[..., 1:], "b (f1 f2) (p1 p2) c-> b c (f1 f2 p1 p2)", f1=f, f2=f, p1=p, 168 | p2=p).float() # Shape: [b, c, f*p*f*p] 169 | return labels, spix_map 170 | 171 | def forward(self, x, gt, feat, sdf_data): 172 | mask, center_feat = self.cluster_forward(x, feat, sdf_data) 173 | with torch.no_grad(): 174 | labels, spix_map = self.gen_labels(gt, mask) 175 | return center_feat, labels, spix_map 176 | 177 | def init_weights(self): 178 | for m in self.modules(): 179 | if isinstance(m, nn.Conv2d): 180 | nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') 181 | if m.bias is not None: 182 | nn.init.constant_(m.bias, 0) 183 | elif isinstance(m, nn.BatchNorm2d): 184 | nn.init.constant_(m.weight, 1) 185 | nn.init.constant_(m.bias, 0) 186 | elif isinstance(m, nn.Linear): 187 | nn.init.normal_(m.weight, 0, 0.01) 188 | -------------------------------------------------------------------------------- /models/yamls/PVTV2.yaml: -------------------------------------------------------------------------------- 1 | img_size: 512 2 | num_classes: 24 3 | in_channels: 32 4 | cluster: 5 | fold: 16 6 | proposal: 2 
7 | dim: 96 8 | vit: 9 | hidden_size: 256 10 | patch_size: 1 11 | depth: 4 12 | num_heads: 4 13 | dim_head: 64 14 | mlp_dim: 1024 15 | dropout: 0.1 16 | attention_dropout: 0.1 17 | backbone: 'pvtv2_b1' 18 | -------------------------------------------------------------------------------- /models/yamls/resnet.yaml: -------------------------------------------------------------------------------- 1 | img_size: 512 2 | num_classes: 24 3 | in_channels: 32 4 | cluster: 5 | fold: 16 6 | proposal: 2 7 | dim: 96 8 | vit: 9 | hidden_size: 256 10 | patch_size: 1 11 | depth: 4 12 | num_heads: 4 13 | dim_head: 64 14 | mlp_dim: 1024 15 | dropout: 0.1 16 | attention_dropout: 0.1 17 | backbone: 'resnet18' 18 | -------------------------------------------------------------------------------- /models/yamls/swin.yaml: -------------------------------------------------------------------------------- 1 | img_size: 512 2 | num_classes: 24 3 | in_channels: 32 4 | cluster: 5 | fold: 16 6 | proposal: 2 7 | dim: 96 8 | vit: 9 | hidden_size: 256 10 | patch_size: 1 11 | depth: 4 12 | num_heads: 4 13 | dim_head: 64 14 | mlp_dim: 1024 15 | dropout: 0.1 16 | attention_dropout: 0.1 17 | backbone: 'swin_tiny' 18 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | addict==2.4.0 2 | certifi==2024.7.4 3 | charset-normalizer==3.3.2 4 | contourpy==1.2.1 5 | cycler==0.12.1 6 | einops==0.8.0 7 | filelock==3.13.1 8 | fonttools==4.53.1 9 | fsspec==2024.2.0 10 | huggingface-hub==0.23.4 11 | idna==3.7 12 | imageio==2.34.2 13 | importlib_metadata==8.0.0 14 | importlib_resources==6.4.0 15 | Jinja2==3.1.3 16 | kiwisolver==1.4.5 17 | lazy_loader==0.4 18 | markdown-it-py==3.0.0 19 | MarkupSafe==2.1.5 20 | matplotlib==3.9.1 21 | mdurl==0.1.2 22 | mmcv==2.2.0 23 | mmengine==0.10.4 24 | mpmath==1.3.0 25 | networkx==3.2.1 26 | numpy==1.26.3 27 | nvidia-cublas-cu12==12.1.3.1 28 | nvidia-cuda-cupti-cu12==12.1.105 29 | nvidia-cuda-nvrtc-cu12==12.1.105 30 | nvidia-cuda-runtime-cu12==12.1.105 31 | nvidia-cudnn-cu12==8.9.2.26 32 | nvidia-cufft-cu12==11.0.2.54 33 | nvidia-curand-cu12==10.3.2.106 34 | nvidia-cusolver-cu12==11.4.5.107 35 | nvidia-cusparse-cu12==12.1.0.106 36 | nvidia-nccl-cu12==2.20.5 37 | nvidia-nvjitlink-cu12==12.1.105 38 | nvidia-nvtx-cu12==12.1.105 39 | opencv-python==4.8.1.78 40 | packaging==24.1 41 | pillow==10.2.0 42 | platformdirs==4.2.2 43 | protobuf==5.27.2 44 | Pygments==2.18.0 45 | pyparsing==3.1.2 46 | python-dateutil==2.9.0.post0 47 | PyYAML==6.0.1 48 | requests==2.32.3 49 | rich==13.7.1 50 | safetensors==0.4.3 51 | scikit-image==0.24.0 52 | scipy==1.13.1 53 | six==1.16.0 54 | sympy==1.12 55 | tensorboardX==2.6.2.2 56 | termcolor==2.4.0 57 | tifffile==2024.7.2 58 | timm==1.0.7 59 | tomli==2.0.1 60 | torch==2.3.0+cu121 61 | torchaudio==2.3.0+cu121 62 | torchvision==0.18.0+cu121 63 | tqdm==4.66.4 64 | triton==2.3.0 65 | typing_extensions==4.9.0 66 | urllib3==2.2.2 67 | yapf==0.40.2 68 | zipp==3.19.2 69 | -------------------------------------------------------------------------------- /test.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | import os 4 | import time 5 | 6 | import cv2 7 | import torch 8 | import yaml 9 | from tqdm import tqdm 10 | 11 | from dataset import load_data 12 | from models.modeling import DSTC 13 | from utils import gen_confusion_matrix, eval_metrics, map_to_color, set_seed 14 | 15 | 16 | def 
test(model, test_loader, device, save_path, num_classes, batch_size): 17 | with torch.no_grad(): 18 | model.eval() 19 | confusion_matrix = torch.zeros([num_classes, num_classes], device=device) 20 | total_time = [] 21 | 22 | for data, label, name in tqdm(test_loader): 23 | data, label = data.to(device), label.long().to(device) 24 | 25 | start_time = time.time() 26 | pred, _, spix_map = model(data, label) 27 | end_time = time.time() 28 | total_time.append(end_time - start_time) 29 | 30 | for i in range(pred.shape[0]): 31 | j = torch.arange(pred.shape[1]).cuda() 32 | mask = (spix_map[i].unsqueeze(-1) == j).float().cuda() 33 | 34 | sal_result = (mask @ pred[i]).argmax(dim=-1) 35 | sal_result_img = map_to_color(sal_result.cpu().numpy(), label[i].cpu().numpy()) 36 | cv2.imwrite(os.path.join(save_path, f"{name[i]}.jpg"), sal_result_img) 37 | 38 | sal_gt = map_to_color(label[i].cpu().numpy()) 39 | cv2.imwrite(os.path.join(save_path, f"{name[i]}_gt.jpg"), sal_gt) 40 | 41 | confusion_matrix_tmp = gen_confusion_matrix(num_classes, sal_result, label[i]) 42 | confusion_matrix += confusion_matrix_tmp 43 | 44 | avg_time = sum(total_time) / len(total_time) 45 | print("Average time: ", avg_time) 46 | print("FPS: ", batch_size / avg_time) 47 | 48 | confusion_matrix = confusion_matrix.cpu().detach().numpy() 49 | return confusion_matrix 50 | 51 | 52 | def main(): 53 | parser = argparse.ArgumentParser(description='PyTorch WHU_OHS Dataset Test') 54 | parser.add_argument('--config', default='models/yamls/resnet.yaml', type=str, 55 | help='Config file path (default: config.yaml)') 56 | parser.add_argument('--log_path', default='logs/resnet.log', type=str, help='Log path') 57 | parser.add_argument('--data_root', default='', type=str, help='Data root') 58 | parser.add_argument('--batch_size', default=8, type=int, help='Mini-batch size (default: 8)') 59 | parser.add_argument('--pretrained_model', default='DataStorage/resnet/best_model.pth', type=str, 60 | help='Pretrained model path') 61 | args = parser.parse_args() 62 | 63 | cfg = yaml.safe_load(open(args.config, 'r')) 64 | device = torch.device('cuda') 65 | num_classes = cfg['num_classes'] 66 | 67 | save_path = os.path.join(os.path.dirname(args.pretrained_model), 'test') 68 | os.makedirs(save_path, exist_ok=True) 69 | os.makedirs('./logs/', exist_ok=True) 70 | 71 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' 72 | set_seed(233) 73 | 74 | logging.basicConfig(filename=args.log_path, level=logging.INFO, 75 | format='%(asctime)s - %(levelname)s: %(message)s') 76 | 77 | model = DSTC(cfg) 78 | if torch.cuda.device_count() > 1: 79 | model = torch.nn.DataParallel(model) 80 | model.cuda() 81 | model.load_state_dict(torch.load(args.pretrained_model), strict=False) 82 | 83 | test_loader = load_data(args, 'ts') 84 | confusion_matrix = test(model, test_loader, device, save_path, num_classes, args.batch_size) 85 | mean_f1, oa, kappa, miou, f1 = eval_metrics(confusion_matrix, mode='ts') 86 | 87 | print('mean_F1: {:.4f}, OA: {:.4f}, Kappa: {:.4f}, mIoU: {:.4f}'.format(mean_f1, oa, kappa, miou)) 88 | print('class F1: ', f1) 89 | 90 | logging.info('mean_F1: {:.4f}, OA: {:.4f}, Kappa: {:.4f}, mIoU: {:.4f}'.format(mean_f1, oa, kappa, miou)) 91 | logging.info(f'class F1: {f1}') 92 | 93 | 94 | if __name__ == '__main__': 95 | main() 96 | -------------------------------------------------------------------------------- /test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/bash 2 | 3 | BACKBONE='resnet' # resnet, PVTV2, swin 4 | 
DATA_ROOT='data/' 5 | 6 | if [ $BACKBONE != 'resnet' ] && [ $BACKBONE != 'PVTV2' ] && [ $BACKBONE != 'swin' ]; then 7 | echo "backbone only supports ResNet, PVTV2, and Swin!" 8 | exit 1 9 | fi 10 | 11 | CUDA_VISIBLE_DEVICES=0 python test.py --config=models/yamls/$BACKBONE.yaml \ 12 | --log_path=logs/$BACKBONE.log \ 13 | --data_root=$DATA_ROOT \ 14 | --pretrained_model=DataStorage/$BACKBONE/best_model.pth 15 | -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import datetime 3 | import logging 4 | import os 5 | 6 | import torch 7 | import yaml 8 | from tensorboardX import SummaryWriter 9 | from tqdm import tqdm 10 | 11 | from dataset import load_data 12 | from models.modeling import DSTC 13 | from utils import set_seed, gen_confusion_matrix, eval_metrics 14 | 15 | 16 | def train(model, optimizer, criterion, train_loader): 17 | model.train() 18 | confusion_matrix = torch.zeros([NUM_CLASSES, NUM_CLASSES]).cuda() 19 | 20 | for data, label, name in train_loader: 21 | data, label = data.cuda(), label.long().cuda() 22 | optimizer.zero_grad() 23 | 24 | pred, train_label, spix_map = model(data, label) 25 | loss = criterion(pred.permute(0, 2, 1), train_label) 26 | loss.backward() 27 | optimizer.step() 28 | 29 | for i in range(pred.shape[0]): 30 | j = torch.arange(pred.shape[1]).cuda() 31 | mask = (spix_map[i].unsqueeze(-1) == j).float().cuda() 32 | 33 | sal_result = (mask @ pred[i]).argmax(dim=-1) 34 | confusion_matrix_tmp = gen_confusion_matrix(NUM_CLASSES, sal_result, label[i]) 35 | confusion_matrix += confusion_matrix_tmp 36 | 37 | confusion_matrix = confusion_matrix.cpu().detach().numpy() 38 | return eval_metrics(confusion_matrix, mode='tr') 39 | 40 | 41 | def valid(model, val_loader): 42 | with torch.no_grad(): 43 | model.eval() 44 | confusionmat = torch.zeros([NUM_CLASSES, NUM_CLASSES]).cuda() 45 | 46 | for data, label, name in val_loader: 47 | data, label = data.cuda(), label.long().cuda() 48 | pred, _, spix_map = model(data, label) 49 | 50 | for i in range(pred.shape[0]): 51 | j = torch.arange(pred.shape[1]).cuda() 52 | mask = (spix_map[i].unsqueeze(-1) == j).float().cuda() 53 | 54 | sal_result = (mask @ pred[i]).argmax(dim=-1) 55 | confusionmat_tmp = gen_confusion_matrix(NUM_CLASSES, sal_result, label[i]) 56 | confusionmat = confusionmat + confusionmat_tmp 57 | 58 | confusionmat = confusionmat.cpu().detach().numpy() 59 | return eval_metrics(confusionmat, mode='val') 60 | 61 | 62 | def main(): 63 | min_f1 = 0 64 | 65 | model = DSTC(cfg) 66 | if torch.cuda.device_count() > 1: 67 | model = torch.nn.DataParallel(model) 68 | model.cuda() 69 | 70 | spix_nums = ( 71 | cfg['cluster']['proposal'] * cfg['cluster']['proposal'] * 72 | cfg['cluster']['fold'] * cfg['cluster']['fold'] 73 | ) 74 | print(f"number of superpixels: {spix_nums}") 75 | print(f"pixels per superpixels: {int(cfg['img_size'] * cfg['img_size'] / spix_nums)}") 76 | 77 | optimizer = torch.optim.Adam(model.parameters(), lr=args.lr, betas=(0.9, 0.999), eps=1e-8, weight_decay=0.0001) 78 | criterion = torch.nn.CrossEntropyLoss(ignore_index=-1) 79 | scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.epoch_num, eta_min=0.1 * args.lr) 80 | 81 | train_loader = load_data(args, 'tr') 82 | val_loader = load_data(args, 'val') 83 | 84 | with tqdm(total=args.epoch_num) as pbar: 85 | for epoch in range(args.epoch_num): 86 | train_f1 = train(model, optimizer, criterion, 
train_loader) 87 | val_f1 = valid(model, val_loader) 88 | if val_f1 > min_f1: 89 | min_f1 = val_f1 90 | torch.save(model.state_dict(), f"DataStorage/{args.exp_name}/best_model.pth") 91 | scheduler.step() 92 | sw.add_scalar('f1/train', train_f1, epoch) 93 | sw.add_scalar('f1/val', val_f1, epoch) 94 | sw.add_scalar('lr', optimizer.state_dict()['param_groups'][0]['lr'], epoch) 95 | pbar.update(1) 96 | logger.info('Epoch: %d, train F1: %.4f, val F1: %.4f' % (epoch, train_f1, val_f1)) 97 | 98 | 99 | if __name__ == '__main__': 100 | parser = argparse.ArgumentParser(description='PyTorch WHU_OHS') 101 | parser.add_argument('--config', default='models/yamls/resnet.yaml', type=str, 102 | help='config file path (default: models/yamls/resnet.yaml)') 103 | parser.add_argument('--exp_name', type=str, help='exp name', required=True) 104 | # Dataset 105 | parser.add_argument('--data_root', default='', type=str, help='data root') 106 | # Training 107 | parser.add_argument('--batch_size', default=8, type=int, help='mini-batch size (default: 8)') 108 | parser.add_argument('--epoch_num', default=100, type=int, help='epoch number (default: 100)') 109 | parser.add_argument('--lr', default=5e-4, type=float, help='initial learning rate (default: 5e-4)') 110 | args = parser.parse_args() 111 | cfg = yaml.load(open(args.config, 'r'), Loader=yaml.FullLoader) 112 | 113 | os.makedirs(f"DataStorage/{args.exp_name}/valid", exist_ok=True) 114 | os.makedirs("logs", exist_ok=True) 115 | 116 | NUM_CLASSES = cfg['num_classes'] 117 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' 118 | set_seed(233) 119 | 120 | if args.exp_name == '': 121 | log_filename = f'logs/{datetime.datetime.now().strftime("%Y%m%d%H%M%S")}.log' 122 | else: 123 | log_filename = f'logs/{args.exp_name}.log' 124 | logging.basicConfig(filename=log_filename, level=logging.INFO, format='%(asctime)s - %(levelname)s: %(message)s') 125 | logger = logging.getLogger('training_logger') 126 | 127 | logger.info(cfg) 128 | logger.info(args) 129 | sw = SummaryWriter(log_dir=f'runs/{args.exp_name}') 130 | main() 131 | -------------------------------------------------------------------------------- /train_and_test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/bash 2 | 3 | BACKBONE='resnet' # resnet, PVTV2, swin 4 | DATA_ROOT='data/' 5 | 6 | if [ $BACKBONE != 'resnet' ] && [ $BACKBONE != 'PVTV2' ] && [ $BACKBONE != 'swin' ]; then 7 | echo "backbone only supports ResNet, PVTV2, and Swin!" 8 | exit 1 9 | fi 10 | 11 | echo "Using $BACKBONE as backbone, now training..." 12 | CUDA_VISIBLE_DEVICES=1 python train.py --config=models/yamls/$BACKBONE.yaml \ 13 | --exp_name=$BACKBONE \ 14 | --data_root=$DATA_ROOT 15 | 16 | 17 | echo "Done training on $BACKBONE, now testing..." 
18 | CUDA_VISIBLE_DEVICES=1 python test.py --config=models/yamls/$BACKBONE.yaml \ 19 | --log_path=logs/$BACKBONE.log \ 20 | --data_root=$DATA_ROOT \ 21 | --pretrained_model=DataStorage/$BACKBONE/best_model.pth -------------------------------------------------------------------------------- /txts/tr.txt: -------------------------------------------------------------------------------- 1 | O15_0023 2 | O4_0092 3 | T6_0019 4 | O15_0022 5 | O20_0136 6 | S7_0115 7 | O22_0121 8 | O10_0071 9 | O8_0110 10 | O19_0034 11 | O18_0026 12 | S1_0075 13 | O7_0108 14 | S4_0010 15 | O15_0118 16 | S4_0078 17 | T5_0134 18 | O12_0108 19 | S4_0091 20 | O6_0096 21 | T3_0131 22 | O20_0027 23 | O10_0129 24 | O15_0004 25 | S2_0110 26 | O4_0030 27 | O8_0045 28 | S1_0064 29 | T8_0104 30 | O8_0024 31 | S8_0045 32 | S8_0024 33 | O22_0051 34 | O12_0016 35 | O15_0127 36 | O15_0070 37 | S5_0086 38 | S2_0036 39 | O23_0031 40 | O20_0098 41 | O10_0054 42 | O26_0011 43 | S5_0085 44 | O18_0017 45 | S5_0083 46 | T7_0098 47 | O23_0107 48 | O7_0138 49 | O12_0109 50 | O9_0045 51 | T7_0012 52 | O21_0081 53 | O9_0019 54 | O16_0058 55 | S8_0046 56 | O24_0024 57 | O15_0102 58 | S7_0014 59 | O18_0078 60 | O12_0042 61 | T3_0067 62 | T5_0133 63 | O18_0079 64 | O22_0111 65 | O16_0112 66 | O11_0075 67 | S3_0117 68 | T2_0126 69 | T6_0084 70 | O2_0058 71 | S4_0066 72 | S2_0003 73 | O16_0113 74 | O15_0088 75 | O17_0025 76 | O17_0012 77 | O15_0027 78 | O3_0130 79 | T2_0066 80 | S2_0061 81 | O19_0099 82 | S6_0060 83 | O9_0007 84 | O5_0009 85 | O2_0005 86 | O8_0015 87 | S4_0023 88 | O6_0021 89 | O26_0098 90 | O6_0129 91 | S4_0089 92 | T2_0047 93 | O5_0052 94 | O5_0152 95 | O12_0070 96 | O21_0116 97 | O13_0110 98 | O24_0020 99 | O23_0069 100 | O18_0113 101 | T3_0082 102 | T1_0011 103 | O22_0006 104 | T3_0054 105 | T6_0098 106 | O20_0060 107 | S5_0075 108 | O26_0034 109 | O6_0054 110 | O22_0089 111 | O5_0080 112 | O14_0067 113 | T2_0088 114 | O25_0029 115 | T1_0004 116 | S1_0041 117 | O16_0066 118 | O20_0025 119 | T6_0076 120 | T6_0009 121 | S3_0071 122 | T1_0125 123 | O23_0030 124 | O18_0005 125 | T2_0111 126 | O14_0075 127 | T3_0050 128 | S6_0119 129 | T1_0030 130 | O2_0115 131 | O6_0087 132 | T3_0130 133 | O26_0053 134 | O12_0053 135 | S3_0120 136 | S8_0050 137 | O7_0018 138 | O20_0050 139 | S3_0008 140 | T4_0071 141 | O23_0105 142 | S8_0057 143 | O18_0013 144 | T2_0099 145 | S6_0128 146 | O14_0118 147 | O6_0057 148 | T2_0054 149 | O4_0071 150 | O26_0021 151 | O25_0059 152 | S7_0086 153 | O6_0107 154 | O18_0044 155 | O1_0048 156 | O15_0111 157 | S8_0113 158 | O7_0011 159 | O8_0117 160 | O8_0054 161 | T2_0109 162 | O11_0119 163 | O17_0018 164 | T2_0056 165 | O21_0067 166 | T7_0047 167 | O10_0096 168 | S7_0017 169 | O2_0036 170 | O12_0014 171 | T1_0138 172 | O19_0078 173 | O26_0049 174 | O16_0129 175 | T2_0029 176 | T1_0117 177 | O1_0040 178 | O7_0049 179 | S7_0078 180 | O4_0041 181 | O4_0091 182 | O16_0054 183 | O23_0024 184 | T3_0043 185 | O10_0022 186 | O25_0125 187 | O11_0078 188 | O14_0023 189 | T2_0065 190 | O20_0079 191 | O22_0082 192 | O6_0016 193 | O12_0034 194 | S1_0011 195 | O3_0048 196 | O8_0056 197 | O11_0100 198 | T1_0130 199 | O12_0002 200 | O20_0048 201 | O23_0111 202 | S4_0088 203 | O4_0049 204 | O9_0078 205 | O21_0102 206 | T7_0114 207 | O20_0109 208 | S3_0134 209 | T3_0088 210 | O10_0007 211 | O15_0024 212 | O11_0041 213 | T3_0053 214 | O3_0069 215 | O13_0121 216 | O20_0038 217 | O25_0019 218 | T6_0062 219 | S2_0066 220 | O13_0052 221 | O5_0109 222 | O23_0117 223 | S2_0073 224 | O7_0039 225 | O9_0119 226 | O26_0063 227 | T5_0019 
228 | T8_0042 229 | O1_0131 230 | O10_0112 231 | O25_0050 232 | S4_0022 233 | O13_0082 234 | O5_0019 235 | T8_0011 236 | T2_0103 237 | O18_0030 238 | O7_0053 239 | O8_0107 240 | O21_0114 241 | O7_0026 242 | O21_0071 243 | O16_0086 244 | S3_0006 245 | T8_0108 246 | O1_0050 247 | O13_0025 248 | T7_0048 249 | S1_0109 250 | O9_0108 251 | O26_0029 252 | O16_0008 253 | T7_0042 254 | O1_0017 255 | O25_0052 256 | O17_0051 257 | O7_0046 258 | O5_0147 259 | T6_0131 260 | O5_0101 261 | S6_0094 262 | O6_0122 263 | T4_0018 264 | T4_0050 265 | O26_0088 266 | O24_0023 267 | O25_0034 268 | O5_0088 269 | S8_0039 270 | O22_0101 271 | O2_0114 272 | O7_0009 273 | T3_0136 274 | O6_0017 275 | S1_0095 276 | O11_0012 277 | S8_0018 278 | O8_0063 279 | T7_0076 280 | T5_0096 281 | O4_0075 282 | T5_0059 283 | O6_0123 284 | T7_0108 285 | O18_0119 286 | O23_0097 287 | S8_0086 288 | T4_0128 289 | O2_0030 290 | O25_0008 291 | O25_0036 292 | O9_0087 293 | O19_0008 294 | T2_0023 295 | S1_0059 296 | O23_0041 297 | O1_0128 298 | O18_0074 299 | O20_0012 300 | O15_0037 301 | T4_0013 302 | S1_0093 303 | O1_0020 304 | T3_0069 305 | S3_0077 306 | O25_0085 307 | O12_0021 308 | O21_0021 309 | S4_0121 310 | O7_0069 311 | O11_0111 312 | T3_0119 313 | O17_0026 314 | O20_0088 315 | O18_0117 316 | T7_0125 317 | T8_0028 318 | O5_0026 319 | T1_0031 320 | O14_0094 321 | S2_0084 322 | O10_0004 323 | S2_0079 324 | S4_0014 325 | T3_0116 326 | O18_0094 327 | S5_0026 328 | T8_0088 329 | O16_0124 330 | O19_0138 331 | T4_0016 332 | T1_0097 333 | T1_0143 334 | O22_0104 335 | T7_0074 336 | O20_0015 337 | O25_0025 338 | S3_0043 339 | O23_0022 340 | T2_0042 341 | O20_0072 342 | O13_0093 343 | O26_0050 344 | S1_0006 345 | O1_0041 346 | T2_0133 347 | O4_0113 348 | O1_0096 349 | O12_0074 350 | S2_0014 351 | O5_0033 352 | O25_0101 353 | O11_0053 354 | S2_0106 355 | O26_0089 356 | O10_0104 357 | O19_0017 358 | O8_0138 359 | T1_0010 360 | S5_0012 361 | T7_0020 362 | T3_0075 363 | O19_0044 364 | S7_0043 365 | O24_0033 366 | O17_0034 367 | O7_0092 368 | O2_0118 369 | O18_0121 370 | S7_0005 371 | O21_0020 372 | O8_0124 373 | O16_0101 374 | O8_0011 375 | T7_0140 376 | O17_0112 377 | S8_0023 378 | T6_0096 379 | S7_0034 380 | O24_0022 381 | O13_0032 382 | O8_0106 383 | O8_0019 384 | O1_0102 385 | S5_0120 386 | O7_0047 387 | T6_0032 388 | O19_0024 389 | O24_0030 390 | O7_0010 391 | S1_0051 392 | T2_0091 393 | S5_0056 394 | S6_0041 395 | O18_0110 396 | S4_0075 397 | S4_0092 398 | T6_0057 399 | T2_0049 400 | T4_0084 401 | O15_0051 402 | T6_0016 403 | T5_0050 404 | O3_0080 405 | O3_0101 406 | S4_0051 407 | S7_0106 408 | O18_0029 409 | O14_0029 410 | O5_0028 411 | S3_0088 412 | O26_0027 413 | O21_0074 414 | T6_0085 415 | O20_0101 416 | O1_0116 417 | T5_0085 418 | O17_0106 419 | O22_0055 420 | O5_0061 421 | O12_0067 422 | O26_0064 423 | T3_0094 424 | S5_0035 425 | O5_0067 426 | T2_0077 427 | O19_0029 428 | O3_0040 429 | O15_0106 430 | O16_0062 431 | S3_0076 432 | O16_0025 433 | O2_0007 434 | T1_0119 435 | S2_0112 436 | O25_0098 437 | O12_0019 438 | S5_0022 439 | T4_0116 440 | S4_0039 441 | O14_0015 442 | O6_0020 443 | O17_0017 444 | O24_0036 445 | O15_0086 446 | O26_0002 447 | O22_0110 448 | O14_0087 449 | O18_0102 450 | O12_0143 451 | O10_0086 452 | O14_0001 453 | O16_0018 454 | T8_0105 455 | O22_0133 456 | O23_0056 457 | T4_0074 458 | O14_0106 459 | S4_0037 460 | O10_0061 461 | O1_0002 462 | T1_0047 463 | O19_0082 464 | T2_0108 465 | O14_0137 466 | O15_0053 467 | S4_0122 468 | O25_0073 469 | O16_0121 470 | S6_0092 471 | O17_0052 472 | O21_0125 473 | S2_0096 474 | 
O16_0075 475 | O5_0056 476 | T8_0085 477 | T4_0027 478 | T6_0104 479 | O11_0095 480 | O9_0004 481 | O4_0072 482 | O2_0041 483 | O26_0107 484 | O22_0014 485 | O4_0067 486 | T7_0136 487 | O21_0054 488 | O14_0020 489 | O2_0078 490 | T8_0033 491 | T5_0075 492 | O4_0068 493 | T2_0069 494 | O6_0076 495 | O7_0130 496 | T7_0017 497 | O14_0021 498 | O14_0088 499 | O9_0035 500 | O8_0023 501 | O9_0013 502 | O11_0030 503 | S2_0013 504 | O19_0022 505 | S1_0021 506 | T5_0092 507 | T8_0053 508 | O15_0114 509 | O21_0066 510 | O8_0094 511 | T1_0007 512 | O6_0064 513 | S1_0084 514 | O17_0123 515 | O2_0012 516 | T5_0094 517 | T2_0100 518 | O6_0035 519 | S1_0032 520 | O14_0107 521 | S3_0116 522 | O4_0002 523 | O7_0052 524 | O9_0094 525 | T3_0035 526 | T8_0041 527 | T7_0010 528 | O23_0054 529 | O14_0009 530 | S7_0124 531 | O23_0067 532 | S5_0020 533 | O25_0070 534 | O10_0108 535 | S4_0080 536 | S5_0114 537 | O2_0062 538 | S2_0054 539 | T8_0026 540 | O26_0046 541 | T6_0072 542 | S6_0132 543 | O10_0049 544 | O1_0054 545 | O12_0101 546 | O26_0025 547 | O23_0102 548 | O20_0051 549 | O26_0005 550 | S7_0015 551 | O1_0109 552 | O14_0016 553 | O23_0011 554 | O14_0113 555 | T1_0029 556 | O22_0074 557 | O24_0098 558 | O22_0020 559 | O8_0080 560 | O19_0012 561 | O25_0054 562 | O8_0034 563 | S2_0092 564 | S8_0098 565 | O6_0005 566 | S3_0106 567 | O14_0053 568 | O20_0129 569 | T1_0107 570 | S8_0080 571 | O13_0086 572 | O12_0092 573 | T1_0124 574 | O2_0073 575 | O12_0080 576 | O17_0048 577 | O21_0093 578 | S1_0010 579 | O5_0034 580 | O14_0018 581 | T7_0093 582 | O18_0087 583 | T5_0035 584 | S3_0109 585 | O22_0008 586 | O2_0038 587 | T5_0081 588 | O18_0075 589 | T5_0052 590 | T7_0145 591 | O7_0020 592 | T2_0079 593 | T1_0105 594 | T4_0069 595 | O13_0020 596 | O4_0058 597 | S4_0067 598 | O13_0042 599 | T2_0020 600 | O14_0140 601 | T5_0039 602 | O2_0125 603 | O8_0115 604 | T2_0121 605 | O19_0049 606 | T5_0087 607 | O5_0050 608 | O4_0084 609 | T4_0010 610 | O7_0012 611 | O6_0084 612 | O9_0084 613 | O23_0100 614 | O8_0109 615 | O1_0115 616 | S1_0069 617 | T3_0093 618 | O22_0053 619 | T5_0095 620 | S1_0083 621 | T2_0014 622 | O3_0043 623 | O12_0058 624 | T6_0014 625 | O12_0094 626 | O14_0089 627 | O22_0088 628 | S5_0050 629 | O14_0126 630 | O8_0027 631 | O20_0052 632 | O11_0063 633 | O25_0002 634 | O9_0090 635 | O24_0047 636 | S2_0011 637 | O22_0047 638 | O13_0094 639 | S3_0056 640 | O21_0104 641 | O9_0129 642 | O3_0112 643 | O13_0030 644 | S6_0053 645 | O20_0049 646 | S8_0004 647 | O19_0071 648 | O7_0140 649 | O11_0005 650 | S4_0111 651 | O26_0054 652 | O20_0085 653 | O2_0085 654 | S4_0123 655 | T4_0006 656 | S6_0124 657 | O19_0103 658 | T3_0077 659 | O19_0053 660 | O23_0013 661 | T1_0103 662 | O12_0079 663 | O26_0052 664 | O26_0068 665 | O25_0007 666 | O22_0084 667 | O5_0119 668 | O23_0023 669 | O18_0076 670 | O20_0032 671 | O9_0026 672 | O25_0016 673 | O9_0095 674 | O9_0046 675 | O21_0103 676 | S1_0125 677 | O14_0002 678 | O17_0007 679 | O12_0136 680 | T7_0117 681 | O20_0074 682 | O21_0037 683 | O11_0127 684 | T3_0099 685 | O25_0047 686 | S4_0124 687 | S2_0129 688 | O6_0082 689 | O6_0099 690 | O25_0053 691 | S8_0092 692 | T1_0086 693 | O24_0099 694 | O9_0022 695 | O9_0122 696 | O7_0100 697 | O5_0099 698 | O18_0106 699 | O19_0112 700 | O8_0067 701 | O2_0105 702 | T3_0135 703 | O4_0032 704 | S8_0021 705 | T2_0058 706 | S1_0031 707 | S5_0046 708 | S5_0005 709 | O5_0124 710 | O16_0074 711 | O20_0024 712 | O18_0064 713 | S8_0012 714 | T1_0039 715 | S7_0041 716 | O11_0061 717 | O21_0036 718 | O4_0076 719 | O4_0051 720 | T8_0078 721 
| O26_0045 722 | O23_0086 723 | T5_0030 724 | O20_0077 725 | T7_0062 726 | S2_0050 727 | O14_0048 728 | T2_0059 729 | O26_0100 730 | O4_0061 731 | O10_0076 732 | O25_0012 733 | O20_0092 734 | S2_0046 735 | T2_0098 736 | O15_0010 737 | O23_0094 738 | O26_0111 739 | S4_0064 740 | O10_0113 741 | O19_0027 742 | O10_0099 743 | S5_0014 744 | O26_0076 745 | O3_0007 746 | O17_0029 747 | O21_0107 748 | T3_0076 749 | T3_0098 750 | O10_0120 751 | T4_0054 752 | O15_0007 753 | O14_0059 754 | O3_0053 755 | O20_0047 756 | O11_0099 757 | O6_0065 758 | T2_0089 759 | O16_0132 760 | S6_0052 761 | T2_0008 762 | O1_0085 763 | S8_0011 764 | O12_0138 765 | T8_0030 766 | S1_0039 767 | O5_0089 768 | O19_0035 769 | O17_0091 770 | O4_0042 771 | O7_0124 772 | S6_0071 773 | T2_0045 774 | S6_0103 775 | S8_0025 776 | T1_0048 777 | T4_0056 778 | O18_0082 779 | O26_0059 780 | T4_0023 781 | T1_0001 782 | O12_0075 783 | S7_0072 784 | O24_0097 785 | O16_0055 786 | O7_0031 787 | O14_0080 788 | O4_0107 789 | T2_0087 790 | O18_0077 791 | O7_0029 792 | O24_0056 793 | O12_0056 794 | O2_0011 795 | O12_0004 796 | O25_0062 797 | T2_0017 798 | O8_0021 799 | S8_0129 800 | T2_0018 801 | O19_0129 802 | T6_0021 803 | T2_0043 804 | T1_0034 805 | O10_0020 806 | O17_0093 807 | O14_0046 808 | O19_0116 809 | O21_0124 810 | O11_0015 811 | T5_0065 812 | O7_0003 813 | O4_0006 814 | O2_0101 815 | O23_0092 816 | O9_0111 817 | O22_0095 818 | T6_0065 819 | S6_0069 820 | O20_0141 821 | S7_0131 822 | T2_0074 823 | O21_0016 824 | S4_0052 825 | T3_0096 826 | O15_0078 827 | T5_0084 828 | S3_0098 829 | S4_0108 830 | T2_0033 831 | S4_0090 832 | T3_0072 833 | O23_0110 834 | O15_0056 835 | O25_0010 836 | O7_0062 837 | O7_0075 838 | O22_0027 839 | O20_0078 840 | O12_0011 841 | S3_0122 842 | T2_0072 843 | O8_0025 844 | O21_0033 845 | O11_0076 846 | T7_0134 847 | O22_0007 848 | O19_0073 849 | O1_0094 850 | O3_0127 851 | O18_0002 852 | O16_0103 853 | O14_0025 854 | O20_0118 855 | S5_0062 856 | S5_0076 857 | T2_0093 858 | O20_0040 859 | S7_0028 860 | O1_0108 861 | O24_0115 862 | O18_0081 863 | S8_0119 864 | O14_0086 865 | O21_0122 866 | S6_0019 867 | O3_0046 868 | O17_0008 869 | O24_0078 870 | S2_0058 871 | T1_0025 872 | O11_0004 873 | O20_0067 874 | T1_0078 875 | O20_0069 876 | O11_0064 877 | T4_0020 878 | O6_0086 879 | O3_0021 880 | O5_0062 881 | T7_0068 882 | O6_0062 883 | S6_0073 884 | T5_0104 885 | T3_0049 886 | S7_0123 887 | O13_0068 888 | T6_0048 889 | S7_0008 890 | O3_0020 891 | T1_0123 892 | O24_0121 893 | S5_0031 894 | S3_0079 895 | S5_0128 896 | S8_0049 897 | S3_0081 898 | O17_0120 899 | O5_0103 900 | O9_0124 901 | O3_0078 902 | O3_0026 903 | O24_0101 904 | O26_0072 905 | S5_0080 906 | O14_0143 907 | O22_0025 908 | O15_0113 909 | T2_0123 910 | O7_0056 911 | S2_0001 912 | O18_0112 913 | S6_0050 914 | O26_0099 915 | O24_0003 916 | O18_0126 917 | S2_0105 918 | O14_0032 919 | T7_0044 920 | O16_0119 921 | T5_0093 922 | O10_0077 923 | T3_0065 924 | O10_0026 925 | O16_0067 926 | O8_0057 927 | S4_0026 928 | O21_0082 929 | S6_0017 930 | O1_0069 931 | O6_0130 932 | S5_0091 933 | T7_0033 934 | T4_0004 935 | S6_0003 936 | S7_0030 937 | S1_0102 938 | O11_0056 939 | S1_0124 940 | O13_0092 941 | S4_0065 942 | O26_0115 943 | T1_0053 944 | O13_0056 945 | O26_0065 946 | O13_0115 947 | S1_0090 948 | T2_0021 949 | O16_0077 950 | T7_0128 951 | S8_0035 952 | S7_0045 953 | O9_0070 954 | O13_0019 955 | S6_0099 956 | T6_0060 957 | O19_0135 958 | S4_0098 959 | O26_0039 960 | O14_0004 961 | T7_0064 962 | S4_0087 963 | T4_0062 964 | O7_0095 965 | O2_0071 966 | T5_0034 967 | 
O2_0086 968 | T8_0006 969 | O11_0001 970 | O17_0059 971 | O18_0065 972 | O8_0104 973 | O7_0037 974 | T1_0042 975 | T7_0084 976 | T6_0010 977 | S6_0104 978 | S1_0072 979 | O7_0079 980 | O13_0013 981 | T8_0074 982 | O21_0005 983 | S8_0041 984 | T8_0080 985 | S6_0048 986 | O3_0119 987 | S1_0053 988 | T4_0124 989 | S3_0011 990 | O9_0051 991 | O16_0123 992 | T6_0047 993 | S8_0051 994 | T1_0033 995 | T4_0122 996 | O16_0003 997 | O10_0063 998 | O20_0135 999 | T3_0078 1000 | O17_0001 1001 | O13_0024 1002 | S8_0036 1003 | S1_0017 1004 | O16_0045 1005 | S8_0133 1006 | O5_0014 1007 | O25_0027 1008 | O8_0133 1009 | O17_0066 1010 | T8_0106 1011 | T1_0116 1012 | O24_0131 1013 | S6_0013 1014 | S7_0085 1015 | S8_0084 1016 | T6_0067 1017 | O6_0078 1018 | T6_0077 1019 | O18_0057 1020 | T3_0063 1021 | O26_0082 1022 | O11_0113 1023 | O23_0028 1024 | S8_0110 1025 | T7_0091 1026 | O2_0074 1027 | S2_0062 1028 | O22_0050 1029 | O21_0072 1030 | O11_0094 1031 | O16_0007 1032 | T3_0110 1033 | O13_0080 1034 | O15_0029 1035 | S2_0015 1036 | O15_0079 1037 | O22_0092 1038 | T8_0133 1039 | S7_0031 1040 | O20_0126 1041 | T8_0111 1042 | O12_0105 1043 | O9_0015 1044 | S3_0042 1045 | O26_0047 1046 | T8_0038 1047 | O1_0075 1048 | T4_0109 1049 | O5_0090 1050 | O21_0047 1051 | O5_0027 1052 | O24_0012 1053 | O7_0078 1054 | O23_0015 1055 | O26_0134 1056 | O21_0069 1057 | O20_0112 1058 | T1_0008 1059 | O4_0083 1060 | O23_0033 1061 | O7_0128 1062 | O6_0113 1063 | O20_0007 1064 | O19_0121 1065 | O3_0049 1066 | O21_0101 1067 | T2_0002 1068 | O2_0027 1069 | O16_0131 1070 | T7_0095 1071 | S5_0039 1072 | T3_0029 1073 | O12_0128 1074 | O10_0025 1075 | O23_0032 1076 | O3_0070 1077 | T7_0081 1078 | O19_0137 1079 | S6_0044 1080 | O18_0025 1081 | S7_0003 1082 | O26_0078 1083 | S2_0115 1084 | O1_0066 1085 | O18_0115 1086 | O8_0097 1087 | O10_0072 1088 | O26_0109 1089 | O2_0040 1090 | T8_0003 1091 | S5_0077 1092 | O9_0006 1093 | T1_0142 1094 | O10_0018 1095 | O9_0080 1096 | O19_0023 1097 | O5_0017 1098 | S7_0102 1099 | O15_0061 1100 | O26_0013 1101 | O19_0037 1102 | T2_0116 1103 | T8_0016 1104 | O5_0144 1105 | S4_0013 1106 | O19_0111 1107 | T8_0076 1108 | O2_0015 1109 | S5_0090 1110 | O12_0028 1111 | O3_0055 1112 | O18_0037 1113 | T6_0106 1114 | O20_0113 1115 | T4_0081 1116 | O18_0105 1117 | S3_0061 1118 | O2_0108 1119 | S1_0063 1120 | T3_0044 1121 | S7_0055 1122 | O23_0087 1123 | O1_0104 1124 | S2_0028 1125 | S1_0122 1126 | S2_0093 1127 | O8_0072 1128 | O14_0055 1129 | T5_0107 1130 | O9_0021 1131 | S5_0093 1132 | O10_0121 1133 | O23_0001 1134 | O14_0123 1135 | O15_0124 1136 | S7_0087 1137 | O8_0065 1138 | O8_0013 1139 | O5_0038 1140 | O21_0058 1141 | O7_0072 1142 | T6_0118 1143 | O22_0046 1144 | S1_0127 1145 | T8_0004 1146 | O8_0031 1147 | T5_0040 1148 | O7_0063 1149 | O18_0100 1150 | O23_0119 1151 | O24_0086 1152 | O18_0015 1153 | S8_0053 1154 | O25_0103 1155 | O24_0093 1156 | O3_0065 1157 | S6_0063 1158 | O21_0078 1159 | O13_0063 1160 | O18_0011 1161 | T6_0089 1162 | O26_0028 1163 | S8_0094 1164 | T3_0106 1165 | T8_0049 1166 | O12_0003 1167 | S6_0084 1168 | O14_0101 1169 | O8_0131 1170 | O19_0030 1171 | O17_0113 1172 | S7_0137 1173 | O5_0043 1174 | S3_0084 1175 | O10_0126 1176 | O12_0140 1177 | O17_0065 1178 | O23_0046 1179 | O18_0035 1180 | T3_0004 1181 | O26_0003 1182 | T1_0009 1183 | O11_0034 1184 | S7_0114 1185 | O24_0041 1186 | O18_0122 1187 | T6_0088 1188 | O2_0009 1189 | S7_0138 1190 | T8_0091 1191 | O1_0051 1192 | S5_0119 1193 | O19_0018 1194 | T8_0007 1195 | O18_0095 1196 | O2_0104 1197 | O8_0093 1198 | S8_0081 1199 | O19_0091 1200 
| O2_0089 1201 | O15_0040 1202 | T6_0095 1203 | O7_0131 1204 | O10_0055 1205 | O16_0047 1206 | O5_0143 1207 | T7_0013 1208 | O3_0076 1209 | O8_0101 1210 | O18_0097 1211 | S4_0003 1212 | O7_0023 1213 | O9_0060 1214 | O16_0081 1215 | O4_0022 1216 | O19_0054 1217 | O21_0055 1218 | O9_0044 1219 | O17_0028 1220 | T3_0011 1221 | O16_0031 1222 | O5_0004 1223 | T3_0108 1224 | T2_0082 1225 | T6_0079 1226 | S4_0072 1227 | T6_0059 1228 | O23_0129 1229 | T8_0032 1230 | O24_0049 1231 | T3_0046 1232 | O26_0074 1233 | O24_0132 1234 | O19_0031 1235 | S8_0032 1236 | O14_0092 1237 | O17_0114 1238 | O22_0105 1239 | O10_0090 1240 | O13_0112 1241 | T7_0118 1242 | O16_0068 1243 | O5_0070 1244 | O24_0109 1245 | S2_0091 1246 | S3_0053 1247 | O23_0045 1248 | O3_0137 1249 | O25_0057 1250 | S3_0065 1251 | O4_0099 1252 | O13_0076 1253 | S1_0020 1254 | O10_0058 1255 | S4_0074 1256 | S7_0108 1257 | O11_0068 1258 | S7_0088 1259 | S7_0101 1260 | T4_0110 1261 | O2_0053 1262 | T6_0061 1263 | O20_0033 1264 | O16_0011 1265 | T3_0027 1266 | T7_0089 1267 | T8_0124 1268 | O24_0083 1269 | S1_0074 1270 | T7_0131 1271 | O1_0030 1272 | T7_0053 1273 | S5_0112 1274 | T8_0071 1275 | O12_0095 1276 | O7_0107 1277 | O3_0062 1278 | O25_0021 1279 | O20_0082 1280 | O17_0078 1281 | O26_0081 1282 | O2_0116 1283 | O22_0018 1284 | T4_0032 1285 | T8_0014 1286 | S7_0044 1287 | O1_0032 1288 | O9_0117 1289 | S5_0105 1290 | O23_0089 1291 | O12_0025 1292 | O19_0013 1293 | O19_0038 1294 | S3_0051 1295 | O11_0014 1296 | O24_0034 1297 | T5_0003 1298 | O6_0111 1299 | T2_0040 1300 | O8_0001 1301 | O14_0045 1302 | T8_0001 1303 | O9_0017 1304 | O19_0098 1305 | O12_0112 1306 | O22_0073 1307 | O9_0003 1308 | O3_0030 1309 | O1_0090 1310 | O2_0112 1311 | O20_0071 1312 | O14_0108 1313 | T8_0024 1314 | O4_0024 1315 | O11_0011 1316 | O11_0051 1317 | S3_0036 1318 | T7_0069 1319 | T5_0020 1320 | O24_0045 1321 | O9_0116 1322 | S4_0009 1323 | O3_0050 1324 | S4_0131 1325 | O7_0090 1326 | T4_0005 1327 | O10_0117 1328 | S1_0121 1329 | O21_0089 1330 | T5_0090 1331 | T6_0024 1332 | O13_0079 1333 | O8_0068 1334 | O12_0142 1335 | S1_0116 1336 | O6_0027 1337 | T6_0015 1338 | O7_0132 1339 | T7_0029 1340 | O22_0117 1341 | O26_0035 1342 | O16_0078 1343 | O24_0037 1344 | T3_0037 1345 | O5_0086 1346 | O19_0026 1347 | T4_0026 1348 | O11_0019 1349 | S7_0107 1350 | O13_0040 1351 | O9_0031 1352 | S3_0013 1353 | O3_0074 1354 | S8_0077 1355 | O14_0064 1356 | O11_0090 1357 | O17_0070 1358 | O6_0125 1359 | O5_0083 1360 | T2_0009 1361 | O22_0042 1362 | S5_0081 1363 | O24_0019 1364 | O7_0089 1365 | O11_0108 1366 | S6_0109 1367 | O15_0047 1368 | O19_0109 1369 | O1_0092 1370 | O6_0119 1371 | O13_0133 1372 | T1_0065 1373 | T1_0122 1374 | O16_0038 1375 | O15_0085 1376 | S6_0108 1377 | S2_0057 1378 | O17_0110 1379 | T8_0052 1380 | O18_0092 1381 | O7_0119 1382 | O16_0024 1383 | O11_0072 1384 | O13_0119 1385 | O4_0104 1386 | O11_0060 1387 | T6_0078 1388 | S8_0006 1389 | O5_0122 1390 | S8_0026 1391 | O24_0106 1392 | O15_0115 1393 | O21_0044 1394 | S2_0043 1395 | S8_0123 1396 | O9_0081 1397 | O19_0036 1398 | O13_0053 1399 | T6_0124 1400 | T2_0122 1401 | O6_0067 1402 | T3_0129 1403 | S3_0107 1404 | S6_0098 1405 | O8_0030 1406 | T3_0031 1407 | T3_0048 1408 | O18_0080 1409 | O25_0014 1410 | O9_0023 1411 | O4_0013 1412 | T4_0096 1413 | O20_0017 1414 | S5_0068 1415 | O2_0054 1416 | O6_0105 1417 | O1_0106 1418 | O6_0089 1419 | O3_0091 1420 | O18_0132 1421 | T4_0083 1422 | O8_0099 1423 | O22_0034 1424 | O16_0126 1425 | O1_0120 1426 | O4_0034 1427 | O18_0007 1428 | S1_0060 1429 | O21_0118 1430 | O5_0123 
1431 | S2_0119 1432 | T1_0098 1433 | O1_0037 1434 | O24_0013 1435 | T3_0008 1436 | T1_0088 1437 | O22_0036 1438 | O9_0083 1439 | O15_0069 1440 | T7_0060 1441 | T3_0057 1442 | T2_0050 1443 | O19_0132 1444 | T1_0019 1445 | O22_0125 1446 | O21_0049 1447 | O9_0125 1448 | O12_0017 1449 | S5_0019 1450 | T4_0111 1451 | O15_0014 1452 | S3_0012 1453 | O10_0046 1454 | S3_0125 1455 | O3_0136 1456 | O20_0123 1457 | O9_0096 1458 | S6_0090 1459 | T3_0028 1460 | O3_0029 1461 | T6_0102 1462 | O18_0059 1463 | O14_0071 1464 | S2_0016 1465 | S3_0040 1466 | S2_0083 1467 | O26_0119 1468 | T8_0089 1469 | O19_0020 1470 | T6_0093 1471 | S2_0067 1472 | O20_0057 1473 | O12_0072 1474 | S7_0064 1475 | O2_0123 1476 | T8_0046 1477 | O6_0109 1478 | O5_0096 1479 | O21_0128 1480 | T8_0061 1481 | T1_0014 1482 | S7_0117 1483 | O16_0019 1484 | O5_0127 1485 | O26_0085 1486 | O16_0100 1487 | O23_0073 1488 | O16_0050 1489 | O11_0058 1490 | S4_0126 1491 | O9_0057 1492 | T7_0096 1493 | T1_0015 1494 | S3_0119 1495 | O21_0052 1496 | S5_0004 1497 | O13_0034 1498 | O4_0033 1499 | O12_0037 1500 | O26_0057 1501 | O4_0009 1502 | T4_0047 1503 | O6_0079 1504 | T4_0097 1505 | O19_0083 1506 | O16_0105 1507 | T5_0119 1508 | T5_0036 1509 | O20_0106 1510 | O6_0112 1511 | S4_0048 1512 | T3_0001 1513 | T8_0005 1514 | O22_0040 1515 | S2_0031 1516 | O1_0024 1517 | O3_0096 1518 | S5_0006 1519 | O7_0137 1520 | S6_0072 1521 | O13_0049 1522 | O15_0076 1523 | O8_0002 1524 | S7_0020 1525 | S1_0133 1526 | O18_0108 1527 | T2_0031 1528 | O11_0115 1529 | O14_0145 1530 | O7_0014 1531 | O2_0022 1532 | O22_0097 1533 | T6_0054 1534 | T7_0057 1535 | S8_0019 1536 | O20_0009 1537 | O5_0075 1538 | T4_0030 1539 | O17_0032 1540 | O6_0135 1541 | O12_0100 1542 | O14_0085 1543 | O6_0120 1544 | T1_0126 1545 | O7_0016 1546 | S3_0078 1547 | O19_0066 1548 | T3_0032 1549 | O12_0050 1550 | O8_0022 1551 | T5_0069 1552 | S3_0031 1553 | O21_0121 1554 | T6_0025 1555 | S4_0011 1556 | T8_0095 1557 | O11_0044 1558 | O4_0059 1559 | O9_0002 1560 | O12_0007 1561 | S8_0016 1562 | T7_0116 1563 | O15_0016 1564 | O22_0057 1565 | S1_0007 1566 | S5_0102 1567 | O21_0023 1568 | O23_0051 1569 | T8_0114 1570 | S5_0064 1571 | T3_0113 1572 | T7_0032 1573 | T7_0121 1574 | O18_0009 1575 | O13_0050 1576 | O13_0108 1577 | T6_0017 1578 | O16_0069 1579 | S4_0061 1580 | O20_0089 1581 | O8_0050 1582 | T4_0121 1583 | T8_0107 1584 | O24_0011 1585 | O24_0057 1586 | O17_0064 1587 | O12_0060 1588 | S5_0073 1589 | T5_0098 1590 | S6_0066 1591 | O12_0093 1592 | S2_0019 1593 | O5_0133 1594 | S1_0015 1595 | O17_0039 1596 | S4_0084 1597 | O26_0086 1598 | S6_0056 1599 | O11_0024 1600 | O23_0007 1601 | O8_0135 1602 | S8_0048 1603 | O24_0116 1604 | O20_0021 1605 | S7_0036 1606 | O14_0030 1607 | O22_0009 1608 | S7_0054 1609 | O25_0001 1610 | O20_0114 1611 | O2_0056 1612 | O21_0115 1613 | T6_0043 1614 | O11_0128 1615 | O7_0141 1616 | T5_0089 1617 | O9_0073 1618 | T6_0114 1619 | O3_0038 1620 | T4_0129 1621 | T4_0120 1622 | O5_0154 1623 | T3_0125 1624 | O11_0016 1625 | O11_0107 1626 | O21_0061 1627 | O1_0053 1628 | S5_0110 1629 | S6_0033 1630 | O9_0018 1631 | S6_0001 1632 | O10_0052 1633 | O7_0112 1634 | O4_0021 1635 | O19_0014 1636 | O15_0067 1637 | O22_0071 1638 | O6_0069 1639 | O6_0093 1640 | T8_0015 1641 | S7_0091 1642 | O8_0033 1643 | O22_0056 1644 | O19_0048 1645 | O12_0051 1646 | S5_0078 1647 | O10_0081 1648 | O14_0114 1649 | O7_0099 1650 | S4_0015 1651 | O19_0068 1652 | O9_0053 1653 | O1_0033 1654 | T2_0090 1655 | O15_0036 1656 | O6_0108 1657 | T4_0130 1658 | S7_0047 1659 | T4_0112 1660 | O17_0057 1661 | O8_0066 
1662 | S8_0087 1663 | O23_0048 1664 | O25_0043 1665 | O4_0020 1666 | T4_0021 1667 | O8_0111 1668 | S6_0025 1669 | O2_0025 1670 | O15_0089 1671 | O19_0102 1672 | O17_0055 1673 | O19_0045 1674 | O17_0100 1675 | O8_0132 1676 | O6_0013 1677 | O9_0105 1678 | S8_0038 1679 | O1_0083 1680 | T4_0042 1681 | O19_0127 1682 | O6_0018 1683 | O9_0128 1684 | O11_0017 1685 | O16_0030 1686 | T5_0132 1687 | O19_0118 1688 | O2_0128 1689 | T1_0056 1690 | T5_0044 1691 | O16_0013 1692 | O8_0007 1693 | S2_0024 1694 | S7_0092 1695 | O6_0052 1696 | O18_0055 1697 | S1_0123 1698 | O6_0008 1699 | T2_0112 1700 | S1_0129 1701 | O12_0061 1702 | T3_0091 1703 | O4_0111 1704 | O13_0064 1705 | T7_0026 1706 | T5_0046 1707 | S2_0053 1708 | T1_0055 1709 | O25_0108 1710 | S2_0056 1711 | T1_0106 1712 | T3_0010 1713 | O24_0075 1714 | O15_0062 1715 | S5_0008 1716 | O3_0052 1717 | T5_0116 1718 | T2_0129 1719 | O18_0014 1720 | O16_0095 1721 | O8_0145 1722 | T7_0025 1723 | O10_0074 1724 | O4_0073 1725 | S3_0029 1726 | O10_0123 1727 | O8_0143 1728 | O16_0122 1729 | T2_0055 1730 | O18_0046 1731 | O26_0125 1732 | O5_0134 1733 | O17_0040 1734 | O2_0031 1735 | S7_0004 1736 | S7_0032 1737 | O24_0090 1738 | O9_0001 1739 | O22_0126 1740 | O14_0047 1741 | O21_0017 1742 | O23_0008 1743 | O13_0085 1744 | O16_0060 1745 | O3_0016 1746 | S3_0003 1747 | S1_0089 1748 | S2_0035 1749 | O12_0099 1750 | O9_0061 1751 | O19_0055 1752 | O12_0121 1753 | O10_0119 1754 | O26_0038 1755 | O26_0022 1756 | T8_0102 1757 | O2_0026 1758 | S1_0131 1759 | S8_0116 1760 | O5_0156 1761 | O25_0067 1762 | O21_0060 1763 | O12_0054 1764 | O25_0045 1765 | S6_0076 1766 | T6_0023 1767 | O22_0123 1768 | O16_0004 1769 | O19_0069 1770 | O17_0119 1771 | O22_0129 1772 | S7_0094 1773 | T8_0109 1774 | O24_0079 1775 | O8_0055 1776 | O3_0083 1777 | O16_0098 1778 | O12_0096 1779 | O8_0038 1780 | O8_0039 1781 | T3_0141 1782 | S8_0132 1783 | O23_0014 1784 | O23_0075 1785 | S7_0073 1786 | S4_0115 1787 | S1_0098 1788 | S7_0130 1789 | T5_0010 1790 | O7_0135 1791 | S3_0072 1792 | O15_0090 1793 | O5_0146 1794 | O17_0128 1795 | O2_0049 1796 | O12_0087 1797 | S6_0111 1798 | O14_0127 1799 | S6_0114 1800 | T1_0077 1801 | T3_0071 1802 | O12_0078 1803 | T5_0062 1804 | O18_0052 1805 | O25_0092 1806 | O22_0109 1807 | O25_0082 1808 | O12_0120 1809 | O10_0030 1810 | O25_0058 1811 | T6_0026 1812 | O11_0077 1813 | S4_0079 1814 | O15_0028 1815 | T1_0131 1816 | O4_0077 1817 | O13_0113 1818 | O26_0017 1819 | O11_0046 1820 | O21_0091 1821 | O26_0015 1822 | O12_0131 1823 | O20_0120 1824 | O25_0013 1825 | O12_0130 1826 | O6_0073 1827 | O14_0019 1828 | O13_0098 1829 | O7_0136 1830 | S2_0071 1831 | O7_0142 1832 | O18_0120 1833 | T1_0016 1834 | O13_0012 1835 | O6_0092 1836 | S5_0028 1837 | S6_0068 1838 | T5_0130 1839 | T7_0130 1840 | T5_0128 1841 | T3_0097 1842 | S2_0041 1843 | O5_0092 1844 | O5_0153 1845 | O11_0033 1846 | O12_0029 1847 | O14_0130 1848 | T4_0127 1849 | O5_0095 1850 | O20_0099 1851 | O24_0071 1852 | O23_0025 1853 | T8_0039 1854 | S3_0111 1855 | T6_0040 1856 | T4_0036 1857 | O5_0097 1858 | T1_0127 1859 | T2_0011 1860 | T5_0022 1861 | O24_0100 1862 | O2_0067 1863 | S4_0071 1864 | T5_0002 1865 | T8_0023 1866 | O19_0107 1867 | T5_0113 1868 | S4_0006 1869 | O20_0036 1870 | O23_0112 1871 | O20_0117 1872 | O10_0008 1873 | T8_0065 1874 | T8_0062 1875 | O6_0032 1876 | O17_0098 1877 | O16_0059 1878 | T3_0100 1879 | O8_0043 1880 | T1_0013 1881 | O10_0107 1882 | S4_0063 1883 | T5_0060 1884 | S7_0033 1885 | S8_0105 1886 | S2_0002 1887 | O13_0073 1888 | O21_0076 1889 | O10_0042 1890 | T5_0108 1891 | O13_0120 1892 
| O17_0084 1893 | O14_0003 1894 | O12_0059 1895 | O15_0035 1896 | O7_0088 1897 | O17_0087 1898 | O2_0079 1899 | T3_0138 1900 | T2_0044 1901 | O12_0091 1902 | O1_0056 1903 | O14_0082 1904 | O5_0085 1905 | O10_0115 1906 | O11_0032 1907 | T4_0114 1908 | O25_0020 1909 | T8_0103 1910 | T7_0059 1911 | O23_0113 1912 | T3_0107 1913 | O24_0050 1914 | O24_0085 1915 | S7_0145 1916 | T7_0022 1917 | O8_0112 1918 | S6_0091 1919 | O25_0071 1920 | T2_0085 1921 | O6_0003 1922 | T6_0091 1923 | T5_0079 1924 | O19_0105 1925 | O12_0115 1926 | T4_0053 1927 | O15_0074 1928 | O13_0111 1929 | O2_0083 1930 | O18_0125 1931 | O23_0037 1932 | O2_0034 1933 | O25_0061 1934 | O18_0098 1935 | O25_0081 1936 | O14_0034 1937 | T7_0087 1938 | O7_0091 1939 | O12_0116 1940 | O24_0065 1941 | S2_0130 1942 | O15_0083 1943 | S6_0038 1944 | O6_0014 1945 | S7_0103 1946 | O22_0108 1947 | T4_0061 1948 | S7_0011 1949 | T2_0114 1950 | T4_0048 1951 | O15_0119 1952 | O22_0065 1953 | O4_0123 1954 | O6_0095 1955 | T1_0051 1956 | O13_0058 1957 | T3_0036 1958 | O23_0124 1959 | O14_0017 1960 | T1_0128 1961 | O7_0019 1962 | T3_0070 1963 | O6_0106 1964 | S3_0066 1965 | S3_0014 1966 | O10_0028 1967 | O9_0109 1968 | O1_0114 1969 | S2_0020 1970 | O26_0104 1971 | O13_0010 1972 | O14_0122 1973 | O20_0094 1974 | O7_0109 1975 | O7_0051 1976 | O13_0028 1977 | S7_0143 1978 | S2_0021 1979 | O11_0067 1980 | S3_0132 1981 | S7_0037 1982 | S4_0086 1983 | O21_0015 1984 | T8_0020 1985 | S1_0019 1986 | O22_0087 1987 | S1_0004 1988 | O22_0094 1989 | O3_0057 1990 | S2_0006 1991 | O3_0032 1992 | O1_0052 1993 | O21_0042 1994 | T7_0079 1995 | O26_0066 1996 | O23_0062 1997 | T4_0063 1998 | O24_0010 1999 | O18_0041 2000 | O13_0070 2001 | S2_0103 2002 | O14_0100 2003 | S7_0002 2004 | T1_0091 2005 | S4_0012 2006 | S3_0054 2007 | O2_0051 2008 | O22_0093 2009 | S5_0057 2010 | O22_0115 2011 | S1_0030 2012 | O18_0039 2013 | O6_0039 2014 | O16_0051 2015 | O23_0018 2016 | O7_0085 2017 | O13_0117 2018 | O10_0027 2019 | T3_0015 2020 | O26_0024 2021 | O8_0142 2022 | O9_0088 2023 | T4_0037 2024 | O5_0044 2025 | O24_0089 2026 | T5_0117 2027 | S5_0107 2028 | O4_0119 2029 | O5_0073 2030 | O5_0053 2031 | O1_0045 2032 | S5_0130 2033 | S3_0032 2034 | O1_0001 2035 | O2_0131 2036 | S5_0038 2037 | O5_0140 2038 | S6_0059 2039 | O14_0026 2040 | O12_0048 2041 | T2_0139 2042 | O9_0034 2043 | O3_0035 2044 | S2_0030 2045 | T4_0017 2046 | O10_0082 2047 | T3_0051 2048 | O6_0118 2049 | S4_0043 2050 | T7_0015 2051 | S1_0085 2052 | O23_0123 2053 | O5_0138 2054 | S7_0022 2055 | S3_0115 2056 | O12_0018 2057 | O20_0018 2058 | O3_0109 2059 | O13_0074 2060 | O12_0038 2061 | O20_0044 2062 | O17_0022 2063 | T3_0047 2064 | T1_0076 2065 | S8_0054 2066 | S6_0078 2067 | S2_0070 2068 | S2_0009 2069 | T2_0004 2070 | O4_0008 2071 | O3_0087 2072 | O4_0124 2073 | S6_0116 2074 | O10_0103 2075 | S6_0049 2076 | S3_0097 2077 | O11_0117 2078 | O5_0121 2079 | O23_0098 2080 | O4_0086 2081 | O23_0115 2082 | T1_0136 2083 | S1_0132 2084 | O15_0130 2085 | O5_0002 2086 | T3_0083 2087 | S7_0144 2088 | T3_0133 2089 | O24_0084 2090 | O22_0099 2091 | T4_0108 2092 | T4_0091 2093 | O24_0123 2094 | O13_0054 2095 | S3_0046 2096 | S2_0012 2097 | T1_0017 2098 | O26_0130 2099 | S1_0027 2100 | O25_0038 2101 | O5_0082 2102 | O4_0130 2103 | O20_0039 2104 | S2_0080 2105 | S7_0118 2106 | S5_0027 2107 | T2_0006 2108 | O14_0146 2109 | O11_0091 2110 | O9_0098 2111 | O16_0016 2112 | O23_0036 2113 | O1_0049 2114 | T6_0031 2115 | O17_0082 2116 | T1_0021 2117 | T2_0115 2118 | S6_0121 2119 | T4_0065 2120 | O9_0065 2121 | T1_0052 2122 | O8_0008 2123 | 
O18_0093 2124 | S8_0074 2125 | S1_0043 2126 | S8_0082 2127 | S7_0132 2128 | O26_0061 2129 | S2_0065 2130 | S4_0103 2131 | O2_0132 2132 | O21_0019 2133 | O26_0094 2134 | T8_0072 2135 | S8_0059 2136 | S5_0029 2137 | O25_0064 2138 | O19_0122 2139 | O22_0003 2140 | S2_0017 2141 | O18_0048 2142 | O1_0064 2143 | T3_0056 2144 | O15_0091 2145 | T8_0126 2146 | O22_0072 2147 | O20_0103 2148 | O25_0099 2149 | O7_0071 2150 | O22_0031 2151 | S3_0001 2152 | O15_0008 2153 | S4_0008 2154 | S6_0082 2155 | O6_0098 2156 | S6_0070 2157 | O8_0070 2158 | O11_0026 2159 | O4_0121 2160 | O18_0043 2161 | O12_0117 2162 | O8_0088 2163 | S5_0129 2164 | O12_0110 2165 | O4_0080 2166 | T2_0131 2167 | S8_0072 2168 | S8_0128 2169 | S2_0032 2170 | O8_0146 2171 | O16_0010 2172 | O20_0042 2173 | O14_0041 2174 | S4_0096 2175 | O11_0130 2176 | S8_0109 2177 | T5_0082 2178 | T6_0092 2179 | O13_0122 2180 | T7_0023 2181 | O18_0069 2182 | S5_0123 2183 | S1_0045 2184 | O17_0127 2185 | O19_0072 2186 | S8_0103 2187 | O25_0039 2188 | T4_0035 2189 | O3_0010 2190 | O6_0049 2191 | O1_0117 2192 | O25_0023 2193 | O9_0016 2194 | O10_0036 2195 | O7_0005 2196 | T7_0106 2197 | O5_0064 2198 | O20_0093 2199 | O4_0087 2200 | T3_0040 2201 | S2_0098 2202 | O19_0058 2203 | S2_0049 2204 | T4_0131 2205 | O8_0082 2206 | T6_0053 2207 | O19_0070 2208 | O9_0038 2209 | O11_0120 2210 | O19_0004 2211 | O26_0004 2212 | O10_0097 2213 | O21_0062 2214 | O4_0040 2215 | O11_0052 2216 | S8_0065 2217 | O17_0049 2218 | O2_0017 2219 | O13_0101 2220 | O13_0103 2221 | O25_0040 2222 | O22_0035 2223 | O17_0044 2224 | O7_0044 2225 | O1_0124 2226 | O7_0032 2227 | O13_0046 2228 | O23_0061 2229 | O21_0065 2230 | O2_0093 2231 | S4_0119 2232 | S7_0052 2233 | O19_0047 2234 | O11_0018 2235 | O24_0135 2236 | O24_0070 2237 | O4_0106 2238 | O8_0044 2239 | O9_0103 2240 | O9_0135 2241 | O7_0126 2242 | T8_0077 2243 | O15_0105 2244 | O25_0129 2245 | O21_0075 2246 | O12_0081 2247 | S3_0067 2248 | O16_0104 2249 | O2_0014 2250 | O6_0088 2251 | T6_0034 2252 | T1_0043 2253 | S6_0006 2254 | O25_0041 2255 | O22_0033 2256 | T1_0002 2257 | T7_0028 2258 | T6_0113 2259 | O4_0081 2260 | S8_0055 2261 | O20_0139 2262 | S6_0023 2263 | T2_0092 2264 | O3_0025 2265 | O5_0031 2266 | O23_0058 2267 | S2_0033 2268 | O16_0087 2269 | O5_0059 2270 | O4_0118 2271 | O19_0056 2272 | O1_0082 2273 | O5_0012 2274 | T6_0071 2275 | O8_0003 2276 | O10_0106 2277 | O21_0127 2278 | T5_0097 2279 | S5_0098 2280 | O17_0109 2281 | O13_0018 2282 | T5_0049 2283 | O20_0081 2284 | S6_0014 2285 | O1_0008 2286 | S6_0105 2287 | O23_0029 2288 | T1_0137 2289 | O10_0039 2290 | S4_0125 2291 | T4_0033 2292 | O9_0059 2293 | S5_0001 2294 | O12_0010 2295 | O3_0066 2296 | S7_0104 2297 | T7_0056 2298 | O19_0097 2299 | O8_0129 2300 | O22_0079 2301 | O15_0018 2302 | S8_0090 2303 | S5_0011 2304 | O20_0045 2305 | O6_0051 2306 | O6_0053 2307 | O10_0005 2308 | O25_0115 2309 | T6_0037 2310 | T6_0055 2311 | O25_0083 2312 | O17_0102 2313 | O11_0103 2314 | O7_0087 2315 | S8_0107 2316 | O18_0090 2317 | O20_0010 2318 | T8_0069 2319 | O8_0120 2320 | O2_0129 2321 | O17_0002 2322 | O8_0090 2323 | O2_0106 2324 | T3_0142 2325 | T6_0121 2326 | S8_0088 2327 | O21_0046 2328 | S6_0080 2329 | T5_0053 2330 | S8_0043 2331 | O17_0103 2332 | O10_0111 2333 | O3_0088 2334 | O1_0126 2335 | O25_0018 2336 | O8_0074 2337 | O7_0122 2338 | O18_0012 2339 | O21_0027 2340 | T3_0009 2341 | S3_0114 2342 | S3_0086 2343 | S1_0113 2344 | T2_0013 2345 | O15_0084 2346 | T2_0105 2347 | S5_0007 2348 | O14_0097 2349 | O23_0004 2350 | T4_0040 2351 | T1_0084 2352 | O8_0062 2353 | T4_0041 2354 
| O20_0127 2355 | O4_0047 2356 | T6_0050 2357 | O21_0086 2358 | O14_0081 2359 | S7_0129 2360 | O4_0082 2361 | O21_0029 2362 | T4_0029 2363 | S2_0005 2364 | O24_0025 2365 | O21_0088 2366 | T3_0112 2367 | S8_0078 2368 | O2_0046 2369 | T5_0126 2370 | O10_0110 2371 | O13_0124 2372 | S4_0021 2373 | S1_0024 2374 | S1_0058 2375 | T6_0115 2376 | T7_0051 2377 | O25_0049 2378 | S6_0010 2379 | T3_0007 2380 | O6_0040 2381 | T1_0024 2382 | S4_0054 2383 | O22_0064 2384 | O4_0078 2385 | T5_0032 2386 | O9_0056 2387 | T5_0058 2388 | O20_0131 2389 | T8_0010 2390 | O16_0083 2391 | O14_0022 2392 | T8_0019 2393 | O15_0073 2394 | O19_0042 2395 | O24_0113 2396 | S4_0114 2397 | O13_0067 2398 | O3_0071 2399 | T3_0020 2400 | T7_0078 2401 | O23_0050 2402 | O1_0093 2403 | O10_0109 2404 | T5_0123 2405 | O26_0108 2406 | T7_0039 2407 | O22_0103 2408 | O12_0097 2409 | O19_0041 2410 | O17_0011 2411 | O1_0070 2412 | O7_0096 2413 | O5_0100 2414 | O9_0082 2415 | T7_0075 2416 | T7_0129 2417 | O9_0097 2418 | S6_0058 2419 | O11_0062 2420 | O5_0065 2421 | O14_0028 2422 | O16_0057 2423 | S4_0076 2424 | O24_0009 2425 | O25_0074 2426 | O6_0060 2427 | O22_0077 2428 | O26_0092 2429 | O2_0097 2430 | T7_0097 2431 | S8_0127 2432 | O10_0031 2433 | S8_0120 2434 | T6_0058 2435 | O5_0130 2436 | S5_0042 2437 | O17_0101 2438 | O20_0034 2439 | O24_0107 2440 | T3_0026 2441 | S1_0094 2442 | O22_0024 2443 | T6_0045 2444 | S2_0085 2445 | S5_0089 2446 | S2_0055 2447 | O16_0120 2448 | O2_0098 2449 | O26_0032 2450 | T2_0084 2451 | O18_0028 2452 | O15_0039 2453 | O13_0014 2454 | T4_0095 2455 | O8_0126 2456 | O5_0151 2457 | T5_0018 2458 | T5_0061 2459 | O22_0098 2460 | O9_0071 2461 | S5_0113 2462 | S5_0131 2463 | T7_0009 2464 | O24_0134 2465 | O8_0083 2466 | O21_0085 2467 | T5_0008 2468 | T8_0063 2469 | S6_0112 2470 | O1_0043 2471 | O20_0080 2472 | O25_0114 2473 | O8_0091 2474 | O2_0084 2475 | O12_0107 2476 | T6_0105 2477 | S8_0101 2478 | O21_0131 2479 | O18_0084 2480 | S4_0110 2481 | O24_0008 2482 | T7_0072 2483 | O11_0126 2484 | O15_0050 2485 | O1_0038 2486 | O11_0093 2487 | S5_0101 2488 | O22_0052 2489 | S8_0064 2490 | O11_0116 2491 | S3_0021 2492 | O5_0111 2493 | O26_0113 2494 | T6_0097 2495 | O23_0047 2496 | O15_0080 2497 | S7_0083 2498 | S6_0045 2499 | O15_0011 2500 | S5_0040 2501 | O5_0020 2502 | T4_0106 2503 | S7_0039 2504 | S3_0028 2505 | O20_0063 2506 | O15_0132 2507 | T5_0110 2508 | S2_0089 2509 | S7_0099 2510 | S3_0121 2511 | O10_0091 2512 | O6_0061 2513 | O11_0065 2514 | O12_0063 2515 | O1_0029 2516 | O4_0096 2517 | O6_0097 2518 | O13_0036 2519 | T2_0032 2520 | T1_0082 2521 | O12_0071 2522 | O7_0040 2523 | T7_0123 2524 | O3_0027 2525 | S8_0047 2526 | O8_0078 2527 | O24_0058 2528 | O10_0105 2529 | O5_0032 2530 | O23_0125 2531 | O6_0085 2532 | S5_0063 2533 | O23_0006 2534 | O22_0012 2535 | O17_0035 2536 | S1_0050 2537 | T5_0074 2538 | O5_0087 2539 | T6_0052 2540 | O23_0002 2541 | O3_0111 2542 | O26_0006 2543 | T8_0123 2544 | S2_0111 2545 | T3_0055 2546 | O15_0001 2547 | O9_0123 2548 | S3_0002 2549 | S4_0112 2550 | O17_0121 2551 | O5_0011 2552 | O3_0013 2553 | O25_0022 2554 | O2_0020 2555 | S7_0100 2556 | O22_0001 2557 | O11_0080 2558 | T6_0028 2559 | O22_0080 2560 | O26_0083 2561 | S7_0090 2562 | S2_0126 2563 | T7_0005 2564 | O6_0124 2565 | O16_0099 2566 | O24_0105 2567 | O24_0051 2568 | S3_0024 2569 | T7_0071 2570 | T2_0104 2571 | O2_0055 2572 | O3_0092 2573 | O17_0043 2574 | O26_0008 2575 | S7_0125 2576 | S5_0070 2577 | T7_0073 2578 | T5_0106 2579 | O18_0001 2580 | O9_0041 2581 | O21_0090 2582 | O5_0136 2583 | O20_0134 2584 | O4_0023 2585 
| O11_0013 2586 | O19_0002 2587 | O12_0030 2588 | S7_0076 2589 | S1_0014 2590 | O24_0077 2591 | S6_0021 2592 | O2_0122 2593 | O10_0016 2594 | O23_0103 2595 | O26_0102 2596 | T1_0108 2597 | O2_0013 2598 | O2_0061 2599 | T4_0102 2600 | O18_0130 2601 | S2_0102 2602 | O22_0013 2603 | O5_0041 2604 | S6_0043 2605 | O14_0136 2606 | S7_0134 2607 | O15_0038 2608 | O17_0068 2609 | O11_0003 2610 | T1_0046 2611 | O3_0011 2612 | O19_0050 2613 | O6_0024 2614 | S5_0021 2615 | O20_0110 2616 | O14_0091 2617 | O21_0112 2618 | O24_0094 2619 | O25_0120 2620 | O15_0116 2621 | O21_0099 2622 | O7_0068 2623 | O2_0075 2624 | S4_0107 2625 | O10_0101 2626 | O21_0100 2627 | O7_0070 2628 | O6_0010 2629 | O20_0001 2630 | S8_0085 2631 | T4_0125 2632 | O7_0061 2633 | O12_0141 2634 | O7_0102 2635 | O8_0092 2636 | O4_0057 2637 | O24_0125 2638 | O10_0057 2639 | O15_0120 2640 | S6_0129 2641 | O16_0110 2642 | O11_0049 2643 | O21_0035 2644 | O8_0100 2645 | S6_0002 2646 | O26_0135 2647 | S2_0114 2648 | T3_0095 2649 | O22_0059 2650 | T4_0007 2651 | S7_0062 2652 | O5_0007 2653 | O8_0121 2654 | O20_0121 2655 | O14_0073 2656 | O9_0115 2657 | O16_0080 2658 | O23_0079 2659 | O15_0049 2660 | O6_0100 2661 | T1_0070 2662 | O10_0001 2663 | O5_0093 2664 | O14_0066 2665 | T8_0092 2666 | S3_0033 2667 | S6_0065 2668 | T2_0073 2669 | O4_0085 2670 | O13_0037 2671 | O21_0048 2672 | S8_0002 2673 | S1_0022 2674 | O6_0117 2675 | O16_0040 2676 | S5_0013 2677 | S8_0063 2678 | O4_0028 2679 | T8_0040 2680 | O10_0056 2681 | O2_0066 2682 | O20_0013 2683 | O17_0075 2684 | O23_0059 2685 | O18_0109 2686 | T1_0115 2687 | T4_0067 2688 | O12_0122 2689 | O13_0035 2690 | O4_0110 2691 | S4_0095 2692 | O19_0120 2693 | O4_0027 2694 | O3_0117 2695 | O10_0102 2696 | T5_0118 2697 | O3_0084 2698 | O13_0033 2699 | S1_0057 2700 | O1_0013 2701 | S3_0105 2702 | T4_0072 2703 | O15_0033 2704 | O15_0128 2705 | T8_0036 2706 | O26_0019 2707 | O20_0138 2708 | S3_0087 2709 | O19_0133 2710 | T7_0133 2711 | T2_0027 2712 | T8_0110 2713 | S7_0046 2714 | O16_0084 2715 | S1_0126 2716 | O18_0089 2717 | O5_0149 2718 | T3_0061 2719 | T5_0031 2720 | O2_0050 2721 | O10_0010 2722 | T1_0090 2723 | O12_0012 2724 | O2_0002 2725 | T2_0024 2726 | O21_0084 2727 | O21_0053 2728 | O24_0062 2729 | O9_0008 2730 | S7_0048 2731 | O2_0100 2732 | O1_0074 2733 | O4_0129 2734 | O12_0133 2735 | O4_0056 2736 | O11_0131 2737 | T5_0057 2738 | T5_0099 2739 | T7_0090 2740 | S6_0055 2741 | S7_0060 2742 | S7_0049 2743 | O22_0120 2744 | T5_0066 2745 | O26_0080 2746 | T4_0002 2747 | T6_0039 2748 | T5_0033 2749 | S1_0101 2750 | O15_0057 2751 | O17_0037 2752 | S1_0070 2753 | T1_0079 2754 | O25_0033 2755 | O12_0066 2756 | S3_0101 2757 | O2_0124 2758 | S1_0029 2759 | T7_0067 2760 | O19_0060 2761 | S4_0055 2762 | S7_0119 2763 | O7_0066 2764 | T4_0058 2765 | O10_0094 2766 | O9_0106 2767 | O10_0047 2768 | T3_0030 2769 | O15_0100 2770 | O26_0087 2771 | O4_0114 2772 | O11_0086 2773 | T7_0031 2774 | O17_0130 2775 | O10_0019 2776 | S5_0044 2777 | S6_0083 2778 | T5_0007 2779 | T7_0107 2780 | O17_0073 2781 | O25_0031 2782 | T4_0089 2783 | O22_0124 2784 | O15_0125 2785 | O16_0009 2786 | O8_0089 2787 | O21_0126 2788 | O22_0054 2789 | O18_0131 2790 | O5_0125 2791 | O1_0103 2792 | O9_0063 2793 | O9_0102 2794 | O16_0114 2795 | O26_0048 2796 | O10_0070 2797 | O10_0041 2798 | O9_0047 2799 | O3_0024 2800 | T6_0130 2801 | O5_0063 2802 | O4_0060 2803 | O20_0028 2804 | O4_0035 2805 | T4_0104 2806 | O9_0024 2807 | O21_0073 2808 | O4_0131 2809 | S2_0023 2810 | T1_0067 2811 | O7_0101 2812 | O5_0157 2813 | T3_0074 2814 | O21_0006 2815 | 
T7_0109 2816 | S7_0053 2817 | O16_0070 2818 | O11_0110 2819 | T3_0017 2820 | O21_0080 2821 | O11_0112 2822 | O3_0099 2823 | T1_0028 2824 | O19_0126 2825 | O19_0092 2826 | O16_0006 2827 | T5_0045 2828 | O19_0076 2829 | O1_0080 2830 | O13_0015 2831 | O20_0111 2832 | O18_0054 2833 | O9_0075 2834 | T8_0101 2835 | T8_0054 2836 | T5_0103 2837 | T4_0098 2838 | S2_0060 2839 | O11_0098 2840 | O9_0118 2841 | O24_0068 2842 | O13_0048 2843 | O2_0008 2844 | O21_0031 2845 | S5_0036 2846 | S5_0030 2847 | O26_0106 2848 | O21_0059 2849 | S6_0016 2850 | O16_0043 2851 | S3_0022 2852 | S4_0047 2853 | O1_0028 2854 | S1_0106 2855 | O19_0005 2856 | O24_0126 2857 | O11_0073 2858 | O25_0090 2859 | O9_0126 2860 | O4_0112 2861 | O3_0085 2862 | O4_0037 2863 | S1_0076 2864 | T2_0096 2865 | O20_0003 2866 | O23_0043 2867 | O25_0035 2868 | O8_0026 2869 | T3_0002 2870 | S6_0062 2871 | S1_0002 2872 | S5_0072 2873 | S1_0130 2874 | T6_0129 2875 | S1_0044 2876 | T4_0093 2877 | O25_0110 2878 | S7_0068 2879 | O6_0006 2880 | O9_0033 2881 | O20_0090 2882 | O5_0126 2883 | O19_0089 2884 | O25_0089 2885 | O11_0055 2886 | O14_0138 2887 | T1_0032 2888 | O1_0127 2889 | T7_0143 2890 | S1_0062 2891 | S6_0027 2892 | O5_0148 2893 | T5_0055 2894 | T2_0025 2895 | O8_0005 2896 | O13_0102 2897 | O14_0024 2898 | S5_0033 2899 | O18_0051 2900 | O11_0084 2901 | O8_0012 2902 | T8_0093 2903 | O13_0003 2904 | O6_0116 2905 | T5_0111 2906 | S8_0093 2907 | O21_0113 2908 | O5_0141 2909 | S4_0130 2910 | T7_0115 2911 | S4_0030 2912 | S2_0117 2913 | O4_0120 2914 | S2_0010 2915 | O23_0055 2916 | O18_0124 2917 | O17_0053 2918 | O25_0063 2919 | T3_0134 2920 | O13_0004 2921 | O7_0110 2922 | O3_0094 2923 | O15_0066 2924 | O25_0116 2925 | O14_0052 2926 | O1_0021 2927 | O17_0033 2928 | S6_0064 2929 | S3_0068 2930 | O7_0058 2931 | O15_0129 2932 | T2_0086 2933 | O2_0048 2934 | T1_0036 2935 | T6_0127 2936 | O3_0115 2937 | T6_0066 2938 | S6_0081 2939 | O6_0029 2940 | O21_0077 2941 | O3_0064 2942 | O19_0007 2943 | O14_0119 2944 | O26_0060 2945 | O9_0067 2946 | O5_0049 2947 | O14_0051 2948 | O5_0001 2949 | O21_0110 2950 | O25_0068 2951 | T5_0109 2952 | O3_0061 2953 | O17_0085 2954 | O18_0006 2955 | O18_0066 2956 | O14_0007 2957 | S6_0085 2958 | O23_0109 2959 | O19_0046 2960 | O5_0048 2961 | S6_0097 2962 | T3_0124 2963 | O22_0058 2964 | O16_0020 2965 | O5_0155 2966 | O15_0046 2967 | O1_0081 2968 | S7_0112 2969 | T7_0141 2970 | O12_0001 2971 | T8_0097 2972 | S4_0062 2973 | O4_0063 2974 | O7_0103 2975 | O14_0038 2976 | T4_0101 2977 | O14_0129 2978 | O6_0080 2979 | S3_0102 2980 | O8_0125 2981 | O26_0062 2982 | O18_0020 2983 | O6_0031 2984 | O15_0104 2985 | O1_0089 2986 | O8_0081 2987 | O16_0032 2988 | T8_0059 2989 | S7_0105 2990 | T2_0016 2991 | T7_0124 2992 | T2_0037 2993 | O22_0028 2994 | O12_0129 2995 | O18_0062 2996 | O14_0036 2997 | S3_0085 2998 | O23_0026 2999 | O18_0021 3000 | O8_0048 3001 | O2_0032 3002 | S3_0113 3003 | S4_0120 3004 | T6_0069 3005 | O14_0084 3006 | T7_0052 3007 | O19_0095 3008 | S1_0028 3009 | S5_0108 3010 | S7_0116 3011 | O12_0024 3012 | S3_0017 3013 | S8_0034 3014 | O19_0062 3015 | S6_0047 3016 | O14_0040 3017 | O21_0094 3018 | O2_0090 3019 | S7_0089 3020 | O26_0043 3021 | S5_0037 3022 | O5_0021 3023 | S5_0053 3024 | O12_0040 3025 | T3_0121 3026 | T5_0021 3027 | O17_0077 3028 | T7_0083 3029 | O10_0029 3030 | O21_0129 3031 | O12_0106 3032 | O6_0126 3033 | S7_0021 3034 | O7_0073 3035 | O12_0137 3036 | O19_0009 3037 | O6_0002 3038 | O24_0133 3039 | S8_0111 3040 | S1_0035 3041 | O9_0009 3042 | O23_0121 3043 | O1_0035 3044 | O24_0061 3045 | S7_0136 
3046 | O23_0063 3047 | S2_0128 3048 | O5_0036 3049 | O25_0087 3050 | O18_0018 3051 | O7_0098 3052 | S7_0093 3053 | S8_0126 3054 | O13_0118 3055 | T5_0102 3056 | O9_0131 3057 | S4_0018 3058 | T7_0110 3059 | O11_0122 3060 | S8_0095 3061 | O9_0027 3062 | T5_0068 3063 | S5_0003 3064 | O22_0061 3065 | O17_0050 3066 | O24_0087 3067 | S7_0074 3068 | O12_0069 3069 | O3_0022 3070 | O14_0103 3071 | O21_0063 3072 | O4_0015 3073 | T1_0045 3074 | S4_0028 3075 | O24_0111 3076 | O3_0014 3077 | O11_0059 3078 | O14_0049 3079 | O7_0094 3080 | O15_0072 3081 | O12_0082 3082 | S8_0067 3083 | O6_0138 3084 | S2_0097 3085 | O7_0139 3086 | T3_0073 3087 | O8_0141 3088 | O11_0121 3089 | O25_0078 3090 | O9_0134 3091 | O21_0034 3092 | O1_0121 3093 | O1_0084 3094 | T4_0079 3095 | S2_0122 3096 | O26_0128 3097 | O14_0134 3098 | S5_0084 3099 | T8_0021 3100 | S4_0050 3101 | S7_0120 3102 | S2_0074 3103 | O20_0056 3104 | O26_0042 3105 | S8_0058 3106 | S5_0059 3107 | O9_0020 3108 | O10_0002 3109 | T4_0070 3110 | O17_0016 3111 | S5_0025 3112 | O6_0043 3113 | O17_0118 3114 | O25_0080 3115 | S2_0027 3116 | O8_0085 3117 | T3_0058 3118 | O5_0072 3119 | O25_0112 3120 | T5_0086 3121 | T6_0002 3122 | O4_0069 3123 | O25_0121 3124 | O15_0054 3125 | S7_0128 3126 | T1_0063 3127 | S3_0027 3128 | O14_0128 3129 | O1_0034 3130 | O24_0130 3131 | O20_0046 3132 | T1_0110 3133 | O16_0056 3134 | O25_0097 3135 | S4_0024 3136 | S2_0124 3137 | O12_0119 3138 | O15_0123 3139 | O3_0079 3140 | O26_0090 3141 | O19_0010 3142 | S7_0095 3143 | T1_0057 3144 | O22_0107 3145 | O11_0109 3146 | O19_0033 3147 | S3_0063 3148 | O8_0053 3149 | O4_0053 3150 | S8_0114 3151 | S2_0022 3152 | O26_0077 3153 | T7_0113 3154 | T2_0064 3155 | O4_0093 3156 | O4_0044 3157 | S4_0005 3158 | O19_0085 3159 | O8_0147 3160 | O13_0100 3161 | T7_0066 3162 | O3_0058 3163 | O16_0073 3164 | T3_0084 3165 | T1_0027 3166 | O16_0002 3167 | S8_0069 3168 | O9_0064 3169 | S6_0093 3170 | O2_0072 3171 | O4_0052 3172 | O22_0106 3173 | S6_0115 3174 | S5_0017 3175 | O1_0112 3176 | O5_0006 3177 | O1_0119 3178 | S8_0112 3179 | T7_0004 3180 | O12_0045 3181 | O26_0093 3182 | O11_0124 3183 | S1_0087 3184 | O16_0041 3185 | O13_0006 3186 | T3_0041 3187 | S8_0027 3188 | T3_0118 3189 | S7_0029 3190 | O4_0031 3191 | T5_0064 3192 | O3_0063 3193 | O12_0125 3194 | O2_0126 3195 | O11_0048 3196 | O13_0089 3197 | O15_0082 3198 | O21_0002 3199 | O5_0117 3200 | T5_0004 3201 | T5_0029 3202 | O6_0058 3203 | S6_0022 3204 | O23_0027 3205 | O24_0038 3206 | S6_0077 3207 | S2_0047 3208 | T4_0105 3209 | S1_0046 3210 | S6_0107 3211 | O5_0079 3212 | O10_0085 3213 | O17_0083 3214 | O6_0037 3215 | O20_0068 3216 | T4_0011 3217 | O15_0041 3218 | T3_0081 3219 | O5_0040 3220 | O11_0036 3221 | O7_0077 3222 | O23_0106 3223 | O23_0084 3224 | S3_0129 3225 | O20_0137 3226 | O20_0122 3227 | O11_0047 3228 | O23_0101 3229 | T1_0081 3230 | T6_0049 3231 | O14_0027 3232 | S4_0042 3233 | O9_0043 3234 | O17_0099 3235 | O2_0019 3236 | S7_0080 3237 | O12_0126 3238 | T8_0113 3239 | O11_0118 3240 | O6_0025 3241 | O7_0022 3242 | T7_0003 3243 | S8_0115 3244 | T4_0115 3245 | S5_0095 3246 | O13_0125 3247 | O3_0031 3248 | O26_0041 3249 | O3_0045 3250 | S2_0081 3251 | O19_0110 3252 | O16_0033 3253 | S3_0026 3254 | O3_0034 3255 | O12_0132 3256 | O18_0019 3257 | O20_0053 3258 | O15_0058 3259 | O11_0088 3260 | O6_0059 3261 | S7_0001 3262 | S5_0103 3263 | T2_0095 3264 | O1_0125 3265 | S8_0033 3266 | S3_0059 3267 | O14_0132 3268 | T6_0073 3269 | O26_0058 3270 | T1_0041 3271 | T4_0094 3272 | T2_0110 3273 | S5_0125 3274 | O4_0043 3275 | O6_0023 3276 | O24_0124 
3277 | O8_0020 3278 | O11_0085 3279 | T7_0080 3280 | O24_0108 3281 | O22_0021 3282 | O18_0099 3283 | O18_0068 3284 | O13_0045 3285 | S8_0071 3286 | T2_0041 3287 | O2_0059 3288 | S5_0104 3289 | O24_0076 3290 | O11_0007 3291 | T1_0022 3292 | O8_0036 3293 | O8_0140 3294 | T5_0070 3295 | S6_0122 3296 | O21_0057 3297 | O9_0039 3298 | T8_0031 3299 | O5_0132 3300 | T6_0001 3301 | O18_0040 3302 | O19_0134 3303 | O21_0043 3304 | O7_0025 3305 | O8_0114 3306 | T1_0104 3307 | S2_0008 3308 | T4_0024 3309 | S7_0133 3310 | O9_0092 3311 | O5_0106 3312 | T8_0100 3313 | T4_0044 3314 | T5_0131 3315 | O24_0074 3316 | T2_0039 3317 | O22_0004 3318 | O1_0065 3319 | S8_0001 3320 | T7_0111 3321 | O5_0081 3322 | O8_0058 3323 | S2_0069 3324 | O2_0077 3325 | O22_0131 3326 | S4_0049 3327 | O24_0104 3328 | T6_0042 3329 | O16_0061 3330 | T3_0092 3331 | S5_0118 3332 | O16_0029 3333 | T4_0080 3334 | O13_0011 3335 | S3_0074 3336 | T8_0112 3337 | O13_0099 3338 | O17_0062 3339 | O13_0008 3340 | O12_0052 3341 | O8_0064 3342 | O25_0055 3343 | S3_0093 3344 | T5_0073 3345 | O24_0119 3346 | O21_0064 3347 | O14_0010 3348 | T4_0077 3349 | O7_0133 3350 | O7_0041 3351 | T4_0028 3352 | O10_0053 3353 | O6_0011 3354 | T7_0063 3355 | T4_0076 3356 | O18_0088 3357 | O20_0041 3358 | O10_0024 3359 | S7_0012 3360 | S4_0102 3361 | O20_0004 3362 | S4_0029 3363 | O6_0047 3364 | S5_0097 3365 | T6_0074 3366 | T1_0068 3367 | O6_0015 3368 | O23_0012 3369 | O22_0130 3370 | O26_0117 3371 | S3_0123 3372 | O17_0019 3373 | O21_0098 3374 | O9_0104 3375 | T5_0047 3376 | O1_0060 3377 | O14_0102 3378 | O15_0009 3379 | O2_0044 3380 | O17_0086 3381 | O8_0123 3382 | O1_0130 3383 | O12_0084 3384 | O13_0039 3385 | O23_0019 3386 | O16_0027 3387 | O20_0065 3388 | O6_0001 3389 | O1_0006 3390 | O12_0041 3391 | O20_0059 3392 | S5_0052 3393 | T3_0087 3394 | O4_0108 3395 | T8_0060 3396 | S6_0079 3397 | O26_0118 3398 | S6_0036 3399 | O21_0105 3400 | O6_0068 3401 | O2_0096 3402 | O23_0074 3403 | O3_0113 3404 | O16_0128 3405 | O18_0104 3406 | T6_0063 3407 | T4_0025 3408 | O11_0089 3409 | O25_0086 3410 | O7_0007 3411 | O17_0069 3412 | O14_0063 3413 | S5_0092 3414 | O25_0028 3415 | T8_0008 3416 | T5_0063 3417 | S8_0056 3418 | T1_0099 3419 | S6_0009 3420 | T1_0026 3421 | O14_0039 3422 | O26_0040 3423 | O11_0020 3424 | T5_0005 3425 | S6_0102 3426 | O17_0046 3427 | T8_0025 3428 | O2_0068 3429 | O25_0004 3430 | O20_0102 3431 | T8_0084 3432 | T2_0144 3433 | S6_0007 3434 | O22_0112 3435 | O16_0109 3436 | O19_0003 3437 | O9_0127 3438 | S7_0027 3439 | O8_0086 3440 | T4_0015 3441 | T2_0097 3442 | T6_0068 3443 | O22_0069 3444 | O13_0123 3445 | S7_0059 3446 | O12_0020 3447 | O3_0086 3448 | T1_0035 3449 | O18_0114 3450 | T4_0117 3451 | O25_0051 3452 | O4_0048 3453 | O17_0060 3454 | O22_0085 3455 | O10_0093 3456 | S5_0126 3457 | T5_0077 3458 | O13_0044 3459 | O22_0113 3460 | T6_0087 3461 | O17_0058 3462 | S4_0099 3463 | O12_0088 3464 | T5_0071 3465 | S3_0016 3466 | O16_0116 3467 | O20_0023 3468 | S7_0016 3469 | O18_0123 3470 | O26_0097 3471 | O8_0075 3472 | O22_0076 3473 | T2_0026 3474 | O1_0010 3475 | O6_0022 3476 | O5_0025 3477 | O23_0070 3478 | O14_0093 3479 | O25_0046 3480 | O2_0080 3481 | O8_0029 3482 | O8_0136 3483 | S3_0060 3484 | T6_0038 3485 | S1_0110 3486 | S5_0034 3487 | S3_0099 3488 | O8_0130 3489 | O17_0045 3490 | O16_0111 3491 | S1_0128 3492 | O23_0083 3493 | O16_0071 3494 | S8_0037 3495 | O10_0079 3496 | T6_0126 3497 | O23_0104 3498 | O26_0133 3499 | O19_0025 3500 | O1_0097 3501 | T4_0001 3502 | O25_0076 3503 | T2_0030 3504 | O25_0126 3505 | O14_0120 3506 | O8_0010 3507 | 
T4_0009 3508 | O19_0061 3509 | O14_0065 3510 | S1_0025 3511 | O14_0056 3512 | O22_0032 3513 | S1_0108 3514 | S4_0004 3515 | S3_0010 3516 | T1_0092 3517 | O2_0065 3518 | O11_0043 3519 | T1_0100 3520 | O22_0091 3521 | O17_0061 3522 | S4_0040 3523 | O2_0042 3524 | O8_0102 3525 | O18_0033 3526 | O17_0129 3527 | O2_0088 3528 | O10_0050 3529 | O5_0055 3530 | O11_0037 3531 | O10_0083 3532 | T2_0130 3533 | S7_0009 3534 | T8_0018 3535 | O4_0039 3536 | O2_0109 3537 | O12_0134 3538 | O24_0001 3539 | T3_0109 3540 | O25_0056 3541 | O7_0129 3542 | O22_0116 3543 | T8_0067 3544 | O6_0132 3545 | O5_0078 3546 | O18_0118 3547 | O19_0059 3548 | O24_0005 3549 | T8_0017 3550 | T1_0120 3551 | O9_0050 3552 | O2_0047 3553 | O7_0033 3554 | S7_0007 3555 | O5_0045 3556 | O13_0107 3557 | O2_0099 3558 | O12_0077 3559 | O16_0015 3560 | T8_0051 3561 | S6_0029 3562 | O3_0037 3563 | O10_0064 3564 | O21_0022 3565 | O18_0071 3566 | O1_0059 3567 | O10_0095 3568 | O1_0044 3569 | O23_0088 3570 | S1_0111 3571 | O15_0048 3572 | S7_0026 3573 | O14_0095 3574 | O10_0014 3575 | O5_0107 3576 | O16_0021 3577 | O8_0069 3578 | O7_0125 3579 | O6_0136 3580 | S6_0032 3581 | O18_0038 3582 | O12_0008 3583 | S5_0045 3584 | O3_0002 3585 | T6_0011 3586 | O9_0121 3587 | O15_0110 3588 | O3_0047 3589 | T8_0047 3590 | S6_0074 3591 | O13_0001 3592 | O19_0039 3593 | O8_0098 3594 | T6_0081 3595 | O23_0085 3596 | S8_0099 3597 | O7_0057 3598 | T2_0101 3599 | O18_0045 3600 | O3_0068 3601 | O10_0100 3602 | T2_0028 3603 | T6_0051 3604 | S2_0109 3605 | O1_0018 3606 | T7_0050 3607 | S2_0018 3608 | O8_0047 3609 | S8_0052 3610 | S3_0073 3611 | O19_0140 3612 | S1_0073 3613 | O12_0047 3614 | S4_0044 3615 | O1_0031 3616 | O1_0036 3617 | O4_0054 3618 | T8_0094 3619 | O10_0127 3620 | T3_0016 3621 | O9_0028 3622 | O19_0124 3623 | T4_0103 3624 | O3_0042 3625 | O12_0114 3626 | O2_0095 3627 | T8_0035 3628 | O12_0083 3629 | O5_0057 3630 | O8_0087 3631 | S3_0095 3632 | T6_0112 3633 | S4_0020 3634 | O20_0105 3635 | S8_0118 3636 | S7_0057 3637 | O22_0063 3638 | O7_0084 3639 | O13_0095 3640 | O1_0077 3641 | O15_0055 3642 | T4_0057 3643 | O14_0116 3644 | O23_0035 3645 | S1_0026 3646 | O25_0104 3647 | O11_0125 3648 | O3_0110 3649 | T2_0138 3650 | O2_0001 3651 | S4_0083 3652 | S1_0033 3653 | T6_0086 3654 | O4_0036 3655 | O16_0012 3656 | O20_0086 3657 | O5_0022 3658 | O5_0051 3659 | S7_0139 3660 | O7_0017 3661 | O24_0088 3662 | S1_0079 3663 | O23_0096 3664 | O4_0097 3665 | O14_0054 3666 | T6_0020 3667 | S4_0116 3668 | O24_0120 3669 | S7_0035 3670 | O26_0114 3671 | O3_0089 3672 | O15_0002 3673 | S7_0075 3674 | O2_0004 3675 | O9_0005 3676 | T4_0052 3677 | O19_0040 3678 | O25_0026 3679 | S2_0007 3680 | T7_0144 3681 | O17_0095 3682 | S8_0100 3683 | O8_0119 3684 | O2_0069 3685 | S3_0083 3686 | O13_0051 3687 | O18_0049 3688 | O8_0037 3689 | S5_0121 3690 | O5_0074 3691 | T5_0051 3692 | O16_0125 3693 | O23_0042 3694 | T3_0122 3695 | S8_0122 3696 | T7_0119 3697 | S8_0044 3698 | O14_0121 3699 | O8_0144 3700 | T2_0053 3701 | O11_0079 3702 | O5_0035 3703 | O17_0105 3704 | T4_0085 3705 | O11_0029 3706 | O4_0029 3707 | O16_0079 3708 | O24_0092 3709 | O5_0104 3710 | S8_0007 3711 | O24_0002 3712 | O21_0014 3713 | O19_0125 3714 | O2_0057 3715 | O11_0106 3716 | S1_0114 3717 | O25_0037 3718 | O11_0129 3719 | O6_0101 3720 | O9_0114 3721 | O25_0111 3722 | O14_0014 3723 | S3_0082 3724 | O5_0013 3725 | O8_0077 3726 | O17_0013 3727 | O13_0061 3728 | T6_0005 3729 | O3_0073 3730 | O3_0114 3731 | S3_0004 3732 | O3_0041 3733 | S1_0003 3734 | O5_0094 3735 | S4_0073 3736 | S1_0042 3737 | T3_0089 3738 | 
S4_0035 3739 | O21_0068 3740 | S6_0130 3741 | O17_0097 3742 | O7_0035 3743 | S5_0069 3744 | O13_0090 3745 | T1_0094 3746 | O11_0057 3747 | S4_0002 3748 | T8_0134 3749 | O14_0110 3750 | O5_0066 3751 | O22_0086 3752 | O19_0104 3753 | O13_0104 3754 | S6_0126 3755 | O17_0081 3756 | T2_0094 3757 | S3_0126 3758 | S4_0060 3759 | O2_0045 3760 | T6_0123 3761 | O23_0057 3762 | O4_0117 3763 | O9_0014 3764 | S8_0102 3765 | O25_0048 3766 | O24_0060 3767 | O20_0016 3768 | O2_0052 3769 | O2_0028 3770 | O3_0081 3771 | S8_0062 3772 | O1_0076 3773 | S2_0026 3774 | O7_0027 3775 | O4_0050 3776 | S3_0044 3777 | O21_0009 3778 | O17_0122 3779 | O8_0009 3780 | O9_0110 3781 | O12_0139 3782 | O26_0010 3783 | O1_0058 3784 | O3_0131 3785 | T4_0119 3786 | O18_0067 3787 | T4_0008 3788 | T7_0086 3789 | O2_0130 3790 | O12_0043 3791 | O10_0128 3792 | O6_0038 3793 | T2_0125 3794 | O12_0044 3795 | O17_0107 3796 | T1_0129 3797 | O4_0014 3798 | S2_0121 3799 | S6_0004 3800 | O9_0085 3801 | S1_0056 3802 | O17_0009 3803 | T7_0058 3804 | O16_0039 3805 | T7_0104 3806 | S5_0016 3807 | O9_0062 3808 | O6_0050 3809 | O11_0082 3810 | O22_0029 3811 | O14_0058 3812 | S1_0054 3813 | T2_0081 3814 | T5_0129 3815 | O3_0120 3816 | T1_0018 3817 | O24_0073 3818 | T3_0090 3819 | S1_0023 3820 | O17_0038 3821 | O7_0034 3822 | O24_0112 3823 | O3_0059 3824 | T6_0006 3825 | T7_0135 3826 | T5_0083 3827 | S5_0065 3828 | T8_0037 3829 | T6_0083 3830 | S8_0075 3831 | T1_0087 3832 | T6_0070 3833 | O24_0046 3834 | T5_0112 3835 | O5_0068 3836 | O26_0031 3837 | S4_0101 3838 | O19_0074 3839 | T3_0117 3840 | O17_0071 3841 | O19_0077 3842 | O6_0114 3843 | O3_0067 3844 | O14_0105 3845 | O19_0067 3846 | O24_0096 3847 | O15_0060 3848 | O20_0014 3849 | O2_0033 3850 | O23_0076 3851 | O24_0091 3852 | O1_0129 3853 | O3_0075 3854 | O12_0015 3855 | O20_0095 3856 | O19_0090 3857 | O23_0128 3858 | O26_0026 3859 | T6_0056 3860 | O10_0006 3861 | O21_0012 3862 | T7_0021 3863 | S7_0056 3864 | O3_0104 3865 | O25_0015 3866 | O24_0110 3867 | O6_0081 3868 | O1_0039 3869 | O11_0081 3870 | O15_0059 3871 | T6_0044 3872 | S1_0134 3873 | O13_0029 3874 | O18_0116 3875 | O13_0043 3876 | O8_0051 3877 | S3_0023 3878 | T6_0103 3879 | O1_0113 3880 | O5_0110 3881 | O26_0079 3882 | O3_0097 3883 | S1_0066 3884 | O20_0108 3885 | S2_0034 3886 | O7_0097 3887 | S5_0024 3888 | O15_0021 3889 | T4_0086 3890 | O8_0116 3891 | O8_0137 3892 | O24_0044 3893 | S8_0096 3894 | T1_0044 3895 | S6_0024 3896 | O23_0082 3897 | O4_0074 3898 | O4_0128 3899 | S4_0017 3900 | T7_0142 3901 | T1_0085 3902 | O4_0062 3903 | O23_0038 3904 | O8_0059 3905 | T3_0005 3906 | T3_0101 3907 | O5_0024 3908 | S4_0053 3909 | O25_0100 3910 | O13_0062 3911 | O7_0123 3912 | O24_0067 3913 | O9_0130 3914 | S1_0040 3915 | T2_0107 3916 | O26_0007 3917 | S7_0024 3918 | O10_0009 3919 | O24_0031 3920 | T4_0055 3921 | S2_0025 3922 | O26_0018 3923 | T2_0022 3924 | O5_0058 3925 | O6_0041 3926 | S3_0110 3927 | S4_0068 3928 | S6_0005 3929 | T8_0027 3930 | T1_0071 3931 | O14_0061 3932 | O22_0062 3933 | T1_0135 3934 | T3_0038 3935 | O1_0062 3936 | O12_0035 3937 | O23_0009 3938 | O23_0126 3939 | O25_0009 3940 | O21_0056 3941 | T8_0048 3942 | O4_0017 3943 | O26_0129 3944 | S6_0031 3945 | O26_0044 3946 | T8_0068 3947 | S5_0047 3948 | S1_0009 3949 | O12_0006 3950 | O5_0091 3951 | O20_0037 3952 | S4_0085 3953 | O18_0070 3954 | O2_0091 3955 | O2_0018 3956 | T8_0132 3957 | O5_0158 3958 | O17_0111 3959 | O8_0118 3960 | S8_0031 3961 | S5_0109 3962 | O17_0015 3963 | O23_0081 3964 | T2_0070 3965 | O11_0083 3966 | O21_0083 3967 | O20_0055 3968 | S7_0050 3969 | 
O19_0117 3970 | O3_0036 3971 | S2_0037 3972 | T1_0133 3973 | S5_0054 3974 | S1_0100 3975 | O14_0139 3976 | T6_0120 3977 | O21_0026 3978 | O6_0134 3979 | O22_0070 3980 | O10_0035 3981 | O20_0076 3982 | O18_0022 3983 | O24_0048 3984 | O18_0111 3985 | O16_0034 3986 | O15_0026 3987 | O3_0126 3988 | S3_0096 3989 | O16_0091 3990 | S1_0107 3991 | O15_0087 3992 | O26_0112 3993 | O19_0130 3994 | O7_0106 3995 | O11_0006 3996 | T2_0019 3997 | O2_0102 3998 | T7_0019 3999 | O20_0061 4000 | T4_0092 4001 | O17_0063 4002 | O24_0028 4003 | O19_0123 4004 | O20_0029 4005 | T8_0073 4006 | O8_0035 4007 | S7_0077 4008 | O18_0008 4009 | O20_0104 4010 | S4_0034 4011 | T4_0123 4012 | T2_0060 4013 | O14_0062 4014 | O14_0124 4015 | T2_0117 4016 | O13_0009 4017 | O5_0030 4018 | O6_0110 4019 | O2_0082 4020 | O11_0008 4021 | O10_0092 4022 | O1_0057 4023 | S6_0057 4024 | T5_0091 4025 | O1_0019 4026 | O5_0131 4027 | O17_0014 4028 | T6_0022 4029 | T2_0048 4030 | O18_0127 4031 | S4_0129 4032 | S5_0015 4033 | O25_0117 4034 | O26_0071 4035 | O7_0076 4036 | O15_0071 4037 | O20_0008 4038 | T5_0056 4039 | O16_0133 4040 | O9_0058 4041 | S3_0127 4042 | S6_0117 4043 | O14_0031 4044 | T1_0114 4045 | S7_0135 4046 | O8_0042 4047 | T8_0083 4048 | O15_0092 4049 | O10_0017 4050 | O18_0010 4051 | T2_0057 4052 | O12_0076 4053 | O18_0058 4054 | S3_0039 4055 | O24_0040 4056 | T3_0123 4057 | O10_0023 4058 | O22_0045 4059 | O10_0032 4060 | T7_0035 4061 | O23_0071 4062 | O20_0030 4063 | O6_0137 4064 | T8_0081 4065 | T3_0025 4066 | T1_0006 4067 | O19_0021 4068 | O3_0015 4069 | O19_0114 4070 | S1_0052 4071 | O3_0054 4072 | O13_0007 4073 | S8_0013 4074 | O19_0115 4075 | S8_0089 4076 | O16_0023 4077 | T3_0033 4078 | O8_0032 4079 | T6_0117 4080 | S6_0120 4081 | T4_0059 4082 | T6_0090 4083 | O8_0004 4084 | O17_0117 4085 | S6_0015 4086 | T5_0067 4087 | O6_0044 4088 | S8_0079 4089 | T8_0066 4090 | O10_0118 4091 | O24_0080 4092 | O10_0013 4093 | O5_0159 4094 | O20_0058 4095 | O7_0134 4096 | O16_0108 4097 | S3_0100 4098 | T2_0001 4099 | T8_0115 4100 | T4_0039 4101 | O21_0050 4102 | T5_0115 4103 | O21_0117 4104 | O2_0081 4105 | T3_0021 4106 | O5_0005 4107 | O3_0051 4108 | O22_0002 4109 | O11_0105 4110 | O20_0019 4111 | O14_0078 4112 | O6_0048 4113 | O21_0095 4114 | O9_0099 4115 | T3_0018 4116 | O9_0107 4117 | O15_0121 4118 | O8_0061 4119 | O25_0079 4120 | O21_0028 4121 | S3_0009 4122 | O4_0045 4123 | O8_0134 4124 | O6_0077 4125 | O15_0063 4126 | T6_0119 4127 | T7_0014 4128 | T2_0119 4129 | S3_0045 4130 | S4_0038 4131 | S8_0009 4132 | O14_0096 4133 | S8_0028 4134 | O16_0001 4135 | O15_0044 4136 | T3_0140 4137 | T5_0017 4138 | O23_0066 4139 | O24_0004 4140 | O7_0065 4141 | O26_0033 4142 | O7_0024 4143 | O24_0059 4144 | O4_0025 4145 | T4_0064 4146 | O22_0066 4147 | S3_0052 4148 | S4_0132 4149 | T2_0003 4150 | T2_0067 4151 | S2_0131 4152 | O3_0100 4153 | O25_0032 4154 | O11_0010 4155 | O7_0114 4156 | O9_0042 4157 | O21_0013 4158 | O16_0037 4159 | O9_0101 4160 | O6_0026 4161 | O10_0043 4162 | T1_0059 4163 | S6_0039 4164 | O14_0008 4165 | S3_0005 4166 | T6_0013 4167 | O7_0055 4168 | S8_0091 4169 | O20_0097 4170 | S5_0106 4171 | O3_0056 4172 | O8_0122 4173 | O22_0090 4174 | S7_0084 4175 | T6_0018 4176 | T7_0054 4177 | O3_0102 4178 | O17_0003 4179 | O19_0011 4180 | O1_0009 4181 | O14_0083 4182 | O15_0068 4183 | O4_0095 4184 | O5_0077 4185 | S1_0013 4186 | T3_0111 4187 | S5_0115 4188 | T6_0036 4189 | O12_0064 4190 | O23_0068 4191 | T1_0111 4192 | S3_0094 4193 | T7_0040 4194 | O15_0030 4195 | T7_0061 4196 | O8_0016 4197 | O20_0084 4198 | O3_0118 4199 | T5_0124 
4200 | O11_0031 4201 | S2_0082 4202 | S5_0124 4203 | S8_0042 4204 | S2_0042 4205 | O4_0065 4206 | O25_0072 4207 | O20_0043 4208 | O1_0003 4209 | O3_0105 4210 | S3_0062 4211 | O15_0077 4212 | S5_0051 4213 | O23_0095 4214 | S3_0058 4215 | O1_0073 4216 | O23_0080 4217 | S3_0089 4218 | T7_0070 4219 | S6_0008 4220 | O5_0145 4221 | O4_0005 4222 | T1_0139 4223 | O18_0133 4224 | O22_0005 4225 | O5_0039 4226 | O15_0075 4227 | O1_0026 4228 | S7_0070 4229 | O16_0118 4230 | O22_0026 4231 | T2_0127 4232 | O14_0079 4233 | O17_0010 4234 | O18_0086 4235 | T8_0034 4236 | O8_0103 4237 | O4_0115 4238 | O16_0017 4239 | O20_0031 4240 | T4_0022 4241 | S8_0097 4242 | T5_0072 4243 | T1_0109 4244 | T2_0124 4245 | O26_0069 4246 | O10_0114 4247 | O5_0150 4248 | O13_0027 4249 | O3_0072 4250 | O23_0114 4251 | O26_0020 4252 | O26_0123 4253 | O4_0066 4254 | O13_0055 4255 | T8_0079 4256 | O17_0072 4257 | T1_0121 4258 | O2_0127 4259 | S8_0017 4260 | O20_0115 4261 | O4_0046 4262 | O12_0023 4263 | S3_0041 4264 | O25_0091 4265 | O25_0042 4266 | O7_0021 4267 | O25_0069 4268 | O11_0021 4269 | O2_0113 4270 | O21_0119 4271 | O2_0092 4272 | T5_0009 4273 | O15_0034 4274 | O24_0114 4275 | S2_0004 4276 | T3_0064 4277 | S7_0025 4278 | O12_0113 4279 | T4_0046 4280 | O13_0016 4281 | T6_0003 4282 | O17_0104 4283 | O22_0132 4284 | O5_0098 4285 | T3_0022 4286 | S5_0071 4287 | T7_0007 4288 | T4_0049 4289 | O6_0007 4290 | O14_0141 4291 | O15_0131 4292 | O20_0022 4293 | O22_0096 4294 | O5_0023 4295 | T2_0068 4296 | O9_0132 4297 | S3_0020 4298 | T1_0003 4299 | O24_0015 4300 | O17_0021 4301 | O24_0127 4302 | S6_0061 4303 | S8_0076 4304 | O13_0109 4305 | S1_0018 4306 | O17_0094 4307 | O20_0096 4308 | O23_0044 4309 | O16_0097 4310 | O14_0068 4311 | O12_0057 4312 | O7_0111 4313 | S1_0065 4314 | T4_0051 4315 | O19_0119 4316 | O12_0031 4317 | O15_0126 4318 | O2_0117 4319 | O9_0011 4320 | O21_0111 4321 | O21_0130 4322 | T2_0143 4323 | T6_0100 4324 | O4_0094 4325 | O19_0051 4326 | S1_0105 4327 | S7_0038 4328 | T1_0102 4329 | O10_0078 4330 | O13_0069 4331 | T2_0102 4332 | T6_0108 4333 | O5_0018 4334 | S6_0131 4335 | T6_0029 4336 | O1_0046 4337 | T6_0082 4338 | O6_0046 4339 | O13_0081 4340 | O7_0064 4341 | T3_0060 4342 | O3_0006 4343 | O24_0063 4344 | T7_0006 4345 | O2_0024 4346 | O2_0006 4347 | T8_0002 4348 | O14_0043 4349 | O7_0113 4350 | T3_0079 4351 | O7_0045 4352 | O24_0016 4353 | T7_0132 4354 | O21_0038 4355 | O21_0092 4356 | T6_0027 4357 | O23_0017 4358 | O4_0098 4359 | O22_0016 4360 | O17_0076 4361 | O11_0028 4362 | O17_0005 4363 | O11_0040 4364 | T3_0042 4365 | O7_0059 4366 | O5_0042 4367 | S2_0090 4368 | O14_0076 4369 | S1_0068 4370 | O7_0015 4371 | O10_0068 4372 | O11_0069 4373 | O7_0120 4374 | T2_0083 4375 | T4_0066 4376 | O9_0091 4377 | O14_0013 4378 | O21_0123 4379 | S1_0119 4380 | T8_0070 4381 | O9_0079 4382 | S3_0015 4383 | O2_0060 4384 | S3_0030 4385 | S2_0039 4386 | O13_0026 4387 | O16_0088 4388 | S3_0133 4389 | O20_0083 4390 | O11_0038 4391 | O10_0040 4392 | T2_0128 4393 | O11_0002 4394 | S5_0079 4395 | S1_0115 4396 | O4_0001 4397 | S8_0070 4398 | T1_0101 4399 | O4_0019 4400 | S1_0118 4401 | O19_0063 4402 | O6_0071 4403 | O10_0089 4404 | O22_0017 4405 | O11_0097 4406 | O13_0088 4407 | O1_0016 4408 | O1_0088 4409 | O14_0074 4410 | O17_0023 4411 | O18_0004 4412 | O20_0020 4413 | O7_0115 4414 | O22_0102 4415 | S2_0086 4416 | S3_0080 4417 | S5_0122 4418 | O9_0089 4419 | O6_0083 4420 | O10_0066 4421 | S6_0127 4422 | S1_0034 4423 | T5_0121 4424 | O9_0133 4425 | O3_0129 4426 | O3_0044 4427 | O10_0073 4428 | S6_0096 4429 | O1_0061 4430 | 
O16_0036 4431 | O4_0122 4432 | S7_0098 4433 | T1_0089 4434 | O13_0091 4435 | S7_0042 4436 | O14_0042 4437 | O7_0074 4438 | O1_0091 4439 | O1_0047 4440 | O7_0143 4441 | T3_0039 4442 | O15_0109 4443 | O1_0014 4444 | T1_0023 4445 | O26_0070 4446 | O19_0032 4447 | O2_0076 4448 | O26_0012 4449 | O16_0022 4450 | T4_0068 4451 | S4_0001 4452 | S7_0006 4453 | O13_0066 4454 | O1_0063 4455 | O8_0060 4456 | O21_0051 4457 | O14_0104 4458 | S3_0034 4459 | O17_0090 4460 | O11_0101 4461 | O3_0116 4462 | O14_0090 4463 | S2_0125 4464 | O17_0108 4465 | O14_0125 4466 | O22_0119 4467 | T5_0120 4468 | O20_0140 4469 | O25_0006 4470 | S6_0118 4471 | O18_0083 4472 | T4_0078 4473 | O20_0073 4474 | T3_0085 4475 | O20_0087 4476 | T7_0045 4477 | O7_0042 4478 | S1_0047 4479 | O4_0105 4480 | O13_0106 4481 | O16_0028 4482 | O20_0124 4483 | S6_0075 4484 | T1_0112 4485 | O12_0118 4486 | O8_0105 4487 | O15_0097 4488 | O2_0029 4489 | T4_0073 4490 | O21_0120 4491 | O4_0070 4492 | O6_0094 4493 | O12_0022 4494 | O20_0054 4495 | S6_0087 4496 | S7_0058 4497 | S1_0078 4498 | O7_0028 4499 | O26_0014 4500 | T8_0125 4501 | O12_0068 4502 | S3_0047 4503 | S6_0037 4504 | O22_0019 4505 | O18_0096 4506 | S4_0109 4507 | O23_0130 4508 | O10_0075 4509 | T7_0139 4510 | S6_0110 4511 | O6_0004 4512 | S7_0142 4513 | O1_0072 4514 | S2_0107 4515 | S7_0127 4516 | S2_0120 4517 | T7_0034 4518 | T2_0106 4519 | T5_0114 4520 | S8_0083 4521 | O23_0016 4522 | O23_0005 4523 | S4_0133 4524 | O8_0073 4525 | O1_0007 4526 | O22_0083 4527 | O13_0105 4528 | S7_0010 4529 | T1_0072 4530 | O20_0075 4531 | O24_0027 4532 | O5_0060 4533 | S8_0117 4534 | T1_0054 4535 | T5_0125 4536 | O20_0002 4537 | O18_0091 4538 | O21_0106 4539 | S2_0068 4540 | O13_0077 4541 | O2_0016 4542 | O13_0041 4543 | S1_0036 4544 | O7_0060 4545 | O19_0084 4546 | S8_0030 4547 | O24_0021 4548 | O1_0025 4549 | S4_0036 4550 | O5_0010 4551 | O15_0103 4552 | T8_0022 4553 | O25_0102 4554 | S5_0009 4555 | O3_0060 4556 | O22_0100 4557 | S2_0045 4558 | O15_0003 4559 | O2_0070 4560 | O13_0005 4561 | O7_0043 4562 | T2_0061 4563 | T8_0121 4564 | T8_0058 4565 | O10_0044 4566 | T7_0016 4567 | O24_0032 4568 | S7_0069 4569 | O3_0090 4570 | S4_0113 4571 | O16_0082 4572 | O13_0002 4573 | O17_0092 4574 | O13_0078 4575 | T1_0080 4576 | T8_0029 4577 | O26_0096 4578 | O7_0083 4579 | O23_0116 4580 | T3_0059 4581 | T3_0086 4582 | O21_0024 4583 | S6_0123 4584 | T2_0080 4585 | T5_0048 4586 | S4_0007 4587 | O11_0102 4588 | S8_0022 4589 | O5_0108 4590 | O1_0027 4591 | T7_0041 4592 | O11_0042 4593 | T6_0080 4594 | O1_0107 4595 | O13_0031 4596 | S5_0049 4597 | O13_0116 4598 | O18_0032 4599 | T7_0002 4600 | O26_0120 4601 | S3_0055 4602 | T7_0055 4603 | O4_0007 4604 | O18_0003 4605 | O2_0021 4606 | O23_0077 4607 | O4_0116 4608 | O24_0072 4609 | O10_0116 4610 | O22_0060 4611 | O16_0127 4612 | S6_0011 4613 | O14_0109 4614 | S4_0031 4615 | O16_0005 4616 | T2_0140 4617 | O22_0010 4618 | O7_0008 4619 | S3_0131 4620 | O5_0142 4621 | O9_0120 4622 | T1_0083 4623 | O18_0107 4624 | O16_0072 4625 | O26_0051 4626 | T2_0136 4627 | O4_0055 4628 | O14_0142 4629 | O7_0002 4630 | T7_0077 4631 | T5_0038 4632 | O17_0115 4633 | T6_0107 4634 | T7_0024 4635 | O5_0102 4636 | O24_0017 4637 | T6_0012 4638 | O26_0132 4639 | S1_0088 4640 | S8_0008 4641 | O6_0036 4642 | O10_0011 4643 | O12_0005 4644 | O23_0118 4645 | S2_0118 4646 | S3_0128 4647 | T1_0066 4648 | T2_0120 4649 | T7_0008 4650 | S6_0067 4651 | O7_0086 4652 | O9_0029 4653 | O3_0108 4654 | O6_0075 4655 | O19_0139 4656 | O12_0036 4657 | O26_0075 4658 | O17_0004 4659 | T7_0088 4660 | O26_0131 
4661 | S7_0111 4662 | O6_0009 4663 | S1_0099 4664 | O16_0117 4665 | T2_0010 4666 | O6_0131 4667 | T7_0085 4668 | O18_0073 4669 | O25_0060 4670 | O17_0079 4671 | O13_0023 4672 | O26_0110 4673 | O7_0054 4674 | O12_0135 4675 | T3_0045 4676 | S5_0018 4677 | O5_0129 4678 | S5_0002 4679 | O26_0122 4680 | T2_0063 4681 | S4_0033 4682 | O26_0124 4683 | O7_0006 4684 | O12_0026 4685 | O26_0067 4686 | O25_0109 4687 | T4_0031 4688 | O14_0050 4689 | T6_0128 4690 | O10_0012 4691 | O15_0099 4692 | S2_0094 4693 | O9_0093 4694 | O12_0085 4695 | O22_0114 4696 | O23_0078 4697 | T3_0062 4698 | O24_0102 4699 | O5_0118 4700 | O13_0114 4701 | O17_0024 4702 | S2_0104 4703 | O6_0042 4704 | T1_0093 4705 | O18_0072 4706 | O6_0127 4707 | S7_0067 4708 | O6_0033 4709 | O3_0106 4710 | T2_0078 4711 | T7_0112 4712 | S8_0068 4713 | O25_0003 4714 | O13_0017 4715 | O25_0093 4716 | O3_0082 4717 | S3_0130 4718 | O14_0144 4719 | S8_0010 4720 | O2_0023 4721 | O10_0062 4722 | T2_0132 4723 | T8_0075 4724 | T7_0122 4725 | O13_0021 4726 | O16_0096 4727 | S4_0059 4728 | O26_0095 4729 | O3_0005 4730 | O17_0036 4731 | T3_0139 4732 | O22_0039 4733 | O16_0085 4734 | O15_0101 4735 | S7_0023 4736 | O19_0065 4737 | O10_0069 4738 | O8_0028 4739 | O17_0047 4740 | O16_0092 4741 | O19_0113 4742 | O24_0081 4743 | O6_0070 4744 | O11_0023 4745 | O24_0039 4746 | O11_0096 4747 | O10_0034 4748 | T7_0018 4749 | S2_0101 4750 | O10_0048 4751 | O16_0048 4752 | T2_0062 4753 | O3_0095 4754 | S2_0048 4755 | O14_0060 4756 | O3_0103 4757 | O2_0094 4758 | T6_0004 4759 | O2_0119 4760 | O1_0068 4761 | O20_0119 4762 | T1_0096 4763 | O17_0027 4764 | O10_0015 4765 | T7_0120 4766 | T5_0122 4767 | S1_0005 4768 | O19_0075 4769 | O9_0055 4770 | T3_0115 4771 | O20_0070 4772 | O21_0041 4773 | S3_0112 4774 | T6_0125 4775 | T4_0003 4776 | T3_0068 4777 | O10_0003 4778 | O26_0126 4779 | O15_0013 4780 | S3_0018 4781 | T3_0034 4782 | O9_0040 4783 | S6_0028 4784 | O18_0047 4785 | O16_0014 4786 | O15_0094 4787 | O19_0057 4788 | T2_0005 4789 | O26_0030 4790 | O15_0064 4791 | S1_0067 4792 | T1_0064 4793 | S5_0116 4794 | O18_0101 4795 | S3_0064 4796 | T7_0094 4797 | O7_0093 4798 | O1_0100 4799 | O18_0050 4800 | O13_0072 4801 | T2_0035 4802 | S6_0018 4803 | O21_0011 4804 | O16_0107 4805 | T4_0060 4806 | T2_0052 4807 | O21_0007 4808 | O18_0024 4809 | O17_0124 4810 | O5_0029 4811 | O16_0026 4812 | O15_0065 4813 | O6_0063 4814 | O9_0012 4815 | O20_0130 4816 | T5_0037 4817 | O25_0017 4818 | O9_0032 4819 | O24_0006 4820 | S1_0082 4821 | O16_0090 4822 | -------------------------------------------------------------------------------- /txts/ts.txt: -------------------------------------------------------------------------------- 1 | O15_0023 2 | O4_0092 3 | T6_0019 4 | O15_0022 5 | O25_0095 6 | O10_0071 7 | O8_0110 8 | S1_0075 9 | O15_0118 10 | O12_0027 11 | O6_0096 12 | O15_0004 13 | T8_0104 14 | S8_0024 15 | O12_0016 16 | O6_0121 17 | O15_0070 18 | S5_0086 19 | O23_0031 20 | O20_0098 21 | O26_0011 22 | S5_0085 23 | O5_0135 24 | T7_0127 25 | O9_0045 26 | T7_0012 27 | O21_0081 28 | O9_0019 29 | O16_0058 30 | O18_0078 31 | O18_0079 32 | S3_0117 33 | O13_0059 34 | O4_0090 35 | O16_0094 36 | O2_0058 37 | O17_0012 38 | O15_0027 39 | O16_0065 40 | O3_0130 41 | S2_0061 42 | T5_0014 43 | O19_0099 44 | S6_0060 45 | O9_0007 46 | O5_0009 47 | O2_0005 48 | O6_0129 49 | O5_0052 50 | O21_0116 51 | O13_0110 52 | O4_0011 53 | O18_0113 54 | T6_0098 55 | O20_0060 56 | S5_0075 57 | S2_0077 58 | O3_0018 59 | O26_0056 60 | O1_0005 61 | O16_0066 62 | S3_0071 63 | O17_0125 64 | O22_0038 65 | S6_0119 66 | T3_0130 
67 | T8_0131 68 | T4_0071 69 | S8_0057 70 | O19_0081 71 | S6_0128 72 | O6_0057 73 | O4_0071 74 | O26_0021 75 | S5_0043 76 | O18_0044 77 | O15_0111 78 | S8_0113 79 | T2_0056 80 | O25_0113 81 | O10_0096 82 | S7_0017 83 | O24_0053 84 | O26_0049 85 | O25_0105 86 | O1_0040 87 | O7_0049 88 | O12_0033 89 | O4_0091 90 | O16_0054 91 | O11_0050 92 | O14_0023 93 | O20_0079 94 | O22_0082 95 | O12_0034 96 | O6_0090 97 | O16_0035 98 | O8_0056 99 | S1_0096 100 | T7_0114 101 | O25_0065 102 | O20_0109 103 | T3_0088 104 | O15_0024 105 | O13_0121 106 | O13_0052 107 | O23_0117 108 | S3_0090 109 | O7_0039 110 | O9_0119 111 | T5_0019 112 | O1_0131 113 | O10_0112 114 | O13_0082 115 | O5_0019 116 | O18_0030 117 | O8_0107 118 | O7_0026 119 | O20_0066 120 | O21_0071 121 | T8_0108 122 | S1_0109 123 | O9_0108 124 | O26_0029 125 | O25_0052 126 | O7_0046 127 | O5_0147 128 | O5_0101 129 | O6_0122 130 | O6_0012 131 | O26_0088 132 | T8_0118 133 | O18_0053 134 | O2_0114 135 | S6_0089 136 | T3_0136 137 | S1_0095 138 | O23_0065 139 | S8_0018 140 | O8_0063 141 | O19_0087 142 | T5_0096 143 | T5_0059 144 | O13_0127 145 | T4_0128 146 | O13_0057 147 | O22_0041 148 | O9_0087 149 | O19_0008 150 | T5_0023 151 | S1_0059 152 | O23_0041 153 | O20_0012 154 | S1_0093 155 | O12_0021 156 | O21_0021 157 | O25_0124 158 | O7_0069 159 | O2_0003 160 | O18_0117 161 | T7_0125 162 | O5_0026 163 | S2_0084 164 | T3_0116 165 | O18_0094 166 | S5_0026 167 | O1_0111 168 | S3_0043 169 | O23_0022 170 | O13_0093 171 | T2_0133 172 | O4_0113 173 | O26_0089 174 | O10_0104 175 | S3_0025 176 | O19_0017 177 | S5_0012 178 | T7_0020 179 | O24_0033 180 | O7_0092 181 | O21_0070 182 | S7_0005 183 | O21_0020 184 | O8_0124 185 | O8_0011 186 | O17_0112 187 | S8_0023 188 | S7_0034 189 | O8_0019 190 | O7_0047 191 | O2_0120 192 | S1_0051 193 | S5_0056 194 | S6_0041 195 | O18_0061 196 | S4_0092 197 | T6_0057 198 | T2_0049 199 | T4_0084 200 | O14_0029 201 | O5_0028 202 | O26_0027 203 | T5_0085 204 | O5_0016 205 | O22_0055 206 | O6_0115 207 | O26_0064 208 | S5_0035 209 | O5_0067 210 | O19_0029 211 | O15_0106 212 | O16_0062 213 | S5_0061 214 | O5_0116 215 | O16_0025 216 | S2_0112 217 | O12_0019 218 | S5_0022 219 | O9_0068 220 | T4_0034 221 | T4_0116 222 | O26_0002 223 | O14_0087 224 | O8_0084 225 | O18_0102 226 | T8_0105 227 | O22_0133 228 | T8_0128 229 | T4_0074 230 | O10_0061 231 | S8_0015 232 | O4_0101 233 | O19_0082 234 | O14_0137 235 | O15_0053 236 | S6_0092 237 | O17_0052 238 | O21_0125 239 | O5_0056 240 | O8_0096 241 | T8_0085 242 | T6_0104 243 | T7_0136 244 | O21_0054 245 | T8_0033 246 | T5_0075 247 | T2_0069 248 | O6_0076 249 | S7_0065 250 | O7_0130 251 | O23_0040 252 | O15_0133 253 | O9_0035 254 | O8_0023 255 | S2_0013 256 | O3_0133 257 | O21_0066 258 | O8_0094 259 | T1_0007 260 | O6_0064 261 | S1_0084 262 | O22_0030 263 | O2_0012 264 | O4_0109 265 | O6_0035 266 | S3_0116 267 | O4_0002 268 | O7_0082 269 | O2_0043 270 | S4_0070 271 | O23_0054 272 | S7_0124 273 | O3_0121 274 | S5_0020 275 | S4_0080 276 | O2_0062 277 | O26_0046 278 | O10_0049 279 | O26_0025 280 | O1_0109 281 | O23_0011 282 | O14_0113 283 | O7_0104 284 | O22_0074 285 | O22_0020 286 | O25_0054 287 | O17_0074 288 | S8_0131 289 | O6_0005 290 | O14_0053 291 | O20_0129 292 | S8_0080 293 | T1_0124 294 | O2_0073 295 | O17_0048 296 | S1_0048 297 | O14_0111 298 | T5_0035 299 | S7_0122 300 | T1_0105 301 | O10_0038 302 | T4_0069 303 | T5_0039 304 | O2_0125 305 | O19_0028 306 | T5_0087 307 | O4_0084 308 | O3_0124 309 | O6_0084 310 | O6_0091 311 | O3_0123 312 | O8_0109 313 | T3_0093 314 | O22_0053 315 | T5_0095 316 | 
S1_0083 317 | O13_0131 318 | O3_0043 319 | O12_0058 320 | O4_0125 321 | T8_0098 322 | S4_0081 323 | S3_0038 324 | O8_0027 325 | O3_0138 326 | O22_0047 327 | O13_0094 328 | O15_0031 329 | O21_0104 330 | O9_0129 331 | O3_0112 332 | S6_0053 333 | S8_0004 334 | O20_0085 335 | O19_0101 336 | T7_0036 337 | O26_0068 338 | O23_0023 339 | O9_0026 340 | O9_0037 341 | O9_0095 342 | T6_0094 343 | O21_0103 344 | O26_0001 345 | O4_0089 346 | O20_0074 347 | S5_0067 348 | O22_0067 349 | T8_0130 350 | O25_0053 351 | S8_0092 352 | T1_0086 353 | O5_0099 354 | O18_0106 355 | O18_0056 356 | O19_0112 357 | O2_0105 358 | T2_0058 359 | S1_0031 360 | S5_0046 361 | S5_0058 362 | T5_0076 363 | S8_0012 364 | S7_0041 365 | O11_0061 366 | T6_0046 367 | O4_0076 368 | O26_0045 369 | S3_0048 370 | O14_0048 371 | O26_0100 372 | O10_0076 373 | O4_0102 374 | O25_0012 375 | O20_0092 376 | S2_0046 377 | O19_0027 378 | O10_0099 379 | S5_0014 380 | O14_0005 381 | O3_0007 382 | O21_0107 383 | O11_0054 384 | S1_0061 385 | T4_0054 386 | O14_0059 387 | O11_0099 388 | O6_0065 389 | O10_0125 390 | O1_0085 391 | S8_0066 392 | S8_0011 393 | O12_0138 394 | S1_0039 395 | O17_0091 396 | O4_0042 397 | O7_0124 398 | S6_0071 399 | T2_0045 400 | S8_0025 401 | T1_0048 402 | T4_0056 403 | O1_0086 404 | O18_0082 405 | O26_0059 406 | T4_0023 407 | T1_0001 408 | O19_0079 409 | O7_0031 410 | O4_0107 411 | O18_0077 412 | O7_0029 413 | O24_0056 414 | O2_0011 415 | O8_0021 416 | O8_0108 417 | S8_0129 418 | T6_0021 419 | O14_0046 420 | O21_0124 421 | T5_0065 422 | O2_0101 423 | O23_0092 424 | O9_0111 425 | O15_0078 426 | T5_0084 427 | O23_0110 428 | O15_0056 429 | O25_0010 430 | O7_0062 431 | O10_0124 432 | O22_0027 433 | T8_0055 434 | O12_0011 435 | O21_0033 436 | T7_0134 437 | O19_0073 438 | S5_0111 439 | O3_0127 440 | O21_0096 441 | O14_0025 442 | O20_0118 443 | S5_0062 444 | S5_0076 445 | O20_0040 446 | S7_0028 447 | S8_0119 448 | O14_0086 449 | O21_0122 450 | S2_0058 451 | T1_0025 452 | O20_0067 453 | T1_0078 454 | O20_0069 455 | O6_0086 456 | O5_0062 457 | O6_0062 458 | S1_0097 459 | O7_0118 460 | S7_0123 461 | O3_0020 462 | T1_0123 463 | S5_0031 464 | O7_0105 465 | S3_0079 466 | S5_0128 467 | S3_0081 468 | O23_0091 469 | O5_0103 470 | O9_0124 471 | O24_0118 472 | S5_0080 473 | O22_0025 474 | T2_0123 475 | S6_0050 476 | T1_0061 477 | O16_0119 478 | O10_0077 479 | O10_0026 480 | O15_0095 481 | O21_0082 482 | O6_0130 483 | T7_0033 484 | S7_0030 485 | S1_0102 486 | O11_0056 487 | O13_0092 488 | O26_0115 489 | T1_0053 490 | O26_0065 491 | O13_0115 492 | S1_0090 493 | O16_0077 494 | T7_0128 495 | S8_0035 496 | O9_0070 497 | O11_0039 498 | O1_0004 499 | S2_0116 500 | O26_0039 501 | O14_0004 502 | O12_0032 503 | O7_0095 504 | O2_0071 505 | T5_0034 506 | O2_0086 507 | T2_0034 508 | O18_0065 509 | O7_0037 510 | T4_0043 511 | O18_0042 512 | T7_0084 513 | O12_0104 514 | T6_0010 515 | O13_0013 516 | T8_0074 517 | O21_0005 518 | S8_0041 519 | O3_0119 520 | T6_0047 521 | T5_0088 522 | T4_0122 523 | O20_0135 524 | O13_0024 525 | O16_0045 526 | O5_0014 527 | O23_0060 528 | O8_0133 529 | O17_0066 530 | T8_0106 531 | S7_0085 532 | S8_0084 533 | O6_0078 534 | O9_0048 535 | O18_0057 536 | T6_0122 537 | O23_0028 538 | O13_0126 539 | O2_0074 540 | O22_0050 541 | O13_0080 542 | O15_0079 543 | S7_0031 544 | T8_0111 545 | S3_0042 546 | O26_0047 547 | O1_0075 548 | O21_0047 549 | O5_0027 550 | O21_0069 551 | T1_0008 552 | O4_0083 553 | O23_0033 554 | O7_0128 555 | O6_0113 556 | O20_0007 557 | O2_0027 558 | O11_0070 559 | S5_0039 560 | O19_0001 561 | O10_0025 562 | O6_0055 
563 | S2_0115 564 | O18_0115 565 | O8_0097 566 | O10_0072 567 | O25_0011 568 | T3_0103 569 | S5_0077 570 | T1_0142 571 | O10_0018 572 | O9_0080 573 | O5_0017 574 | O26_0013 575 | O5_0144 576 | T2_0046 577 | O12_0028 578 | O4_0010 579 | S3_0061 580 | O2_0108 581 | S1_0063 582 | S7_0055 583 | S1_0122 584 | O14_0055 585 | O25_0127 586 | T5_0107 587 | O14_0123 588 | O8_0065 589 | O5_0115 590 | O3_0004 591 | O21_0058 592 | O7_0072 593 | O2_0039 594 | S1_0127 595 | T5_0040 596 | O7_0063 597 | O18_0100 598 | S8_0053 599 | O15_0122 600 | O25_0103 601 | O24_0093 602 | S6_0063 603 | O21_0078 604 | S1_0037 605 | O26_0116 606 | O26_0028 607 | T3_0106 608 | O12_0111 609 | O19_0030 610 | O17_0113 611 | O10_0126 612 | O12_0140 613 | O17_0065 614 | O17_0089 615 | O23_0046 616 | O24_0041 617 | O22_0049 618 | O2_0009 619 | O19_0018 620 | O18_0095 621 | O2_0104 622 | S8_0081 623 | O2_0089 624 | O7_0131 625 | O16_0047 626 | O5_0143 627 | T7_0013 628 | O9_0060 629 | O16_0081 630 | O4_0022 631 | O21_0055 632 | T2_0082 633 | O23_0052 634 | T6_0059 635 | T8_0032 636 | O19_0031 637 | O22_0105 638 | S4_0127 639 | S3_0053 640 | O3_0137 641 | S3_0065 642 | S1_0020 643 | S5_0032 644 | T2_0051 645 | S2_0051 646 | T3_0027 647 | O21_0108 648 | S1_0074 649 | T7_0131 650 | S4_0128 651 | O7_0067 652 | T7_0037 653 | S5_0112 654 | T8_0120 655 | O25_0021 656 | O24_0043 657 | O20_0082 658 | O17_0078 659 | O4_0004 660 | T4_0032 661 | O1_0032 662 | O25_0122 663 | S6_0113 664 | O9_0117 665 | O24_0034 666 | O23_0039 667 | O14_0045 668 | S1_0001 669 | O17_0042 670 | O12_0112 671 | O9_0003 672 | O3_0030 673 | T8_0024 674 | O25_0107 675 | S3_0036 676 | T5_0020 677 | O9_0116 678 | O23_0020 679 | O7_0090 680 | O10_0117 681 | S1_0121 682 | T6_0024 683 | O14_0070 684 | S1_0116 685 | O22_0117 686 | O19_0016 687 | O26_0035 688 | O16_0078 689 | T1_0062 690 | O9_0031 691 | S3_0013 692 | S8_0077 693 | O14_0064 694 | S3_0092 695 | O22_0042 696 | S5_0127 697 | O1_0133 698 | O6_0119 699 | O13_0133 700 | T1_0122 701 | O15_0085 702 | S6_0106 703 | S2_0057 704 | O18_0092 705 | O7_0119 706 | O16_0024 707 | O11_0072 708 | O13_0119 709 | O11_0060 710 | S8_0006 711 | S8_0026 712 | O24_0106 713 | O21_0044 714 | O9_0081 715 | O6_0128 716 | T2_0122 717 | O6_0067 718 | T3_0129 719 | S6_0098 720 | O18_0080 721 | O14_0099 722 | O9_0023 723 | S4_0056 724 | O23_0072 725 | S8_0121 726 | S5_0068 727 | T7_0126 728 | O26_0121 729 | O6_0089 730 | O18_0132 731 | T4_0083 732 | O8_0099 733 | O3_0017 734 | S4_0093 735 | S1_0060 736 | T1_0088 737 | O22_0036 738 | O9_0083 739 | O15_0069 740 | T2_0050 741 | O21_0049 742 | S3_0050 743 | T4_0111 744 | S3_0012 745 | O10_0046 746 | O3_0136 747 | O20_0123 748 | O9_0096 749 | T5_0015 750 | O6_0034 751 | S6_0090 752 | O3_0029 753 | S1_0080 754 | O14_0071 755 | S2_0083 756 | S4_0104 757 | O19_0020 758 | S7_0064 759 | O2_0123 760 | T8_0046 761 | S7_0117 762 | O5_0127 763 | O23_0073 764 | O11_0058 765 | O7_0116 766 | S4_0126 767 | O9_0057 768 | T7_0096 769 | T1_0015 770 | S5_0004 771 | O13_0034 772 | O26_0057 773 | O4_0009 774 | O6_0079 775 | O15_0020 776 | O16_0105 777 | O6_0045 778 | T5_0036 779 | O20_0106 780 | T3_0128 781 | O1_0024 782 | O26_0009 783 | O21_0097 784 | T1_0075 785 | O11_0115 786 | O2_0022 787 | T6_0054 788 | T7_0057 789 | S8_0019 790 | O20_0009 791 | O17_0032 792 | O14_0085 793 | O6_0120 794 | S3_0078 795 | O19_0066 796 | O12_0050 797 | O8_0022 798 | S3_0031 799 | T2_0134 800 | O21_0121 801 | O25_0077 802 | S4_0118 803 | T6_0025 804 | O21_0025 805 | O11_0044 806 | O9_0002 807 | O26_0036 808 | S8_0016 809 | T7_0116 
810 | O15_0016 811 | O22_0057 812 | O7_0050 813 | O21_0023 814 | O23_0051 815 | T3_0113 816 | T7_0032 817 | T7_0121 818 | O13_0108 819 | S4_0082 820 | O21_0008 821 | T4_0121 822 | T8_0107 823 | S5_0073 824 | S2_0019 825 | O5_0133 826 | O12_0123 827 | O11_0024 828 | O8_0135 829 | O24_0116 830 | O20_0021 831 | S7_0036 832 | O8_0040 833 | O16_0064 834 | S7_0054 835 | O2_0056 836 | O12_0090 837 | O21_0115 838 | O7_0141 839 | T5_0089 840 | T6_0114 841 | T4_0129 842 | T3_0125 843 | O26_0105 844 | O21_0061 845 | O1_0123 846 | O9_0018 847 | O7_0112 848 | O4_0021 849 | O15_0067 850 | O22_0071 851 | S1_0038 852 | O8_0033 853 | O22_0056 854 | O12_0051 855 | S5_0078 856 | O7_0099 857 | O19_0068 858 | O1_0033 859 | T4_0112 860 | O23_0048 861 | O4_0020 862 | S6_0025 863 | O17_0055 864 | O17_0100 865 | O12_0103 866 | O8_0132 867 | O9_0105 868 | O6_0018 869 | O9_0128 870 | O2_0128 871 | O16_0013 872 | O8_0007 873 | O6_0052 874 | O18_0055 875 | O6_0008 876 | O3_0135 877 | O8_0095 878 | T3_0091 879 | T7_0026 880 | T5_0046 881 | S2_0053 882 | O25_0108 883 | S8_0029 884 | O24_0075 885 | T8_0116 886 | O16_0095 887 | O8_0145 888 | T7_0025 889 | O15_0005 890 | O10_0123 891 | O8_0143 892 | T2_0055 893 | T4_0126 894 | O5_0134 895 | O2_0031 896 | S7_0004 897 | O9_0001 898 | O14_0047 899 | O3_0016 900 | O12_0099 901 | O9_0061 902 | O16_0053 903 | O26_0022 904 | O2_0026 905 | T5_0028 906 | O25_0067 907 | S6_0101 908 | S6_0076 909 | T6_0023 910 | O11_0027 911 | O22_0123 912 | T2_0038 913 | O19_0069 914 | S7_0094 915 | T8_0109 916 | O12_0096 917 | O8_0039 918 | S6_0086 919 | S8_0132 920 | S1_0098 921 | S7_0130 922 | O7_0081 923 | O1_0110 924 | O17_0128 925 | O13_0132 926 | O2_0049 927 | O23_0122 928 | S6_0114 929 | O12_0078 930 | O25_0092 931 | O25_0082 932 | O1_0022 933 | O15_0028 934 | O4_0077 935 | O12_0131 936 | O20_0120 937 | O12_0130 938 | O15_0108 939 | O6_0073 940 | O2_0110 941 | T1_0016 942 | O13_0012 943 | O6_0092 944 | S5_0028 945 | T4_0118 946 | T7_0130 947 | O6_0103 948 | S1_0086 949 | S2_0041 950 | O5_0153 951 | O11_0033 952 | T4_0127 953 | T5_0022 954 | S4_0071 955 | O19_0107 956 | T5_0113 957 | O20_0117 958 | O19_0086 959 | O10_0037 960 | O16_0059 961 | S7_0033 962 | O5_0114 963 | O3_0134 964 | O21_0076 965 | T5_0108 966 | O13_0120 967 | T3_0138 968 | O12_0091 969 | O14_0082 970 | O10_0115 971 | O11_0032 972 | O25_0020 973 | T8_0103 974 | T7_0059 975 | O24_0050 976 | O6_0003 977 | O19_0105 978 | O12_0115 979 | T4_0053 980 | O13_0111 981 | O25_0061 982 | O4_0018 983 | O25_0081 984 | O14_0034 985 | T7_0087 986 | O7_0091 987 | S3_0037 988 | O22_0037 989 | O12_0116 990 | O24_0065 991 | O22_0048 992 | O15_0083 993 | S7_0103 994 | S7_0011 995 | O15_0119 996 | T1_0051 997 | S2_0113 998 | O13_0058 999 | S1_0092 1000 | S3_0066 1001 | O10_0028 1002 | O9_0109 1003 | O26_0104 1004 | S2_0059 1005 | O13_0010 1006 | O14_0122 1007 | O7_0109 1008 | O7_0051 1009 | S2_0063 1010 | O13_0028 1011 | T4_0099 1012 | O21_0015 1013 | S1_0019 1014 | O22_0087 1015 | O22_0094 1016 | O3_0032 1017 | O21_0042 1018 | O18_0041 1019 | O13_0070 1020 | S2_0103 1021 | O14_0100 1022 | T1_0091 1023 | S3_0054 1024 | O22_0093 1025 | S5_0057 1026 | O12_0039 1027 | O22_0115 1028 | T8_0117 1029 | S3_0069 1030 | S1_0091 1031 | O7_0085 1032 | O13_0117 1033 | O10_0027 1034 | O20_0006 1035 | T3_0015 1036 | O26_0024 1037 | O8_0142 1038 | O9_0088 1039 | S4_0058 1040 | T3_0014 1041 | O4_0119 1042 | O5_0073 1043 | O5_0053 1044 | S5_0038 1045 | O5_0140 1046 | S6_0059 1047 | S5_0087 1048 | O3_0035 1049 | S2_0030 1050 | S1_0085 1051 | S7_0022 1052 | S3_0115 
1053 | O3_0009 1054 | O20_0018 1055 | O3_0109 1056 | O19_0080 1057 | S5_0060 1058 | O17_0022 1059 | O9_0077 1060 | O13_0038 1061 | S8_0054 1062 | S6_0078 1063 | O4_0124 1064 | O10_0103 1065 | S4_0057 1066 | O11_0117 1067 | O3_0003 1068 | O4_0086 1069 | S3_0103 1070 | O5_0002 1071 | S1_0027 1072 | S2_0080 1073 | S5_0027 1074 | O11_0091 1075 | T6_0031 1076 | O8_0008 1077 | S1_0043 1078 | S2_0065 1079 | O21_0019 1080 | S8_0059 1081 | O25_0064 1082 | O18_0129 1083 | T8_0126 1084 | O7_0071 1085 | O22_0031 1086 | O15_0008 1087 | O4_0121 1088 | O18_0043 1089 | O25_0030 1090 | T8_0057 1091 | O12_0110 1092 | O4_0080 1093 | O19_0093 1094 | O8_0146 1095 | S6_0020 1096 | T8_0056 1097 | O13_0122 1098 | T7_0023 1099 | O18_0069 1100 | S1_0045 1101 | O17_0127 1102 | S8_0103 1103 | T4_0035 1104 | O3_0010 1105 | O25_0023 1106 | O10_0036 1107 | O5_0064 1108 | O20_0093 1109 | O4_0087 1110 | S2_0049 1111 | T4_0131 1112 | O8_0082 1113 | T6_0053 1114 | T1_0141 1115 | O9_0038 1116 | O10_0097 1117 | O23_0049 1118 | S8_0065 1119 | S2_0029 1120 | O13_0103 1121 | O16_0093 1122 | O20_0128 1123 | O17_0044 1124 | O7_0044 1125 | S7_0082 1126 | O7_0032 1127 | O13_0046 1128 | T5_0006 1129 | O23_0061 1130 | O21_0065 1131 | S4_0119 1132 | O9_0049 1133 | S7_0052 1134 | O22_0081 1135 | O25_0129 1136 | S7_0140 1137 | T6_0034 1138 | T1_0043 1139 | O25_0041 1140 | T1_0002 1141 | T6_0113 1142 | S8_0055 1143 | O20_0139 1144 | S6_0023 1145 | O5_0059 1146 | O19_0015 1147 | O10_0106 1148 | T5_0097 1149 | O20_0081 1150 | S6_0105 1151 | O23_0029 1152 | T4_0033 1153 | O9_0059 1154 | S7_0104 1155 | O22_0079 1156 | T2_0142 1157 | S5_0011 1158 | O25_0115 1159 | T6_0037 1160 | O12_0046 1161 | S4_0045 1162 | O25_0083 1163 | O11_0103 1164 | O18_0090 1165 | O20_0010 1166 | T8_0069 1167 | O8_0120 1168 | O2_0129 1169 | S7_0063 1170 | O2_0106 1171 | T6_0121 1172 | S8_0088 1173 | O21_0046 1174 | S6_0080 1175 | T4_0082 1176 | T5_0053 1177 | T8_0043 1178 | O10_0111 1179 | O21_0027 1180 | S3_0114 1181 | O15_0084 1182 | O14_0097 1183 | O4_0103 1184 | O23_0004 1185 | T4_0040 1186 | T4_0041 1187 | O20_0127 1188 | T6_0050 1189 | O21_0086 1190 | O14_0081 1191 | S7_0129 1192 | O4_0082 1193 | T4_0029 1194 | O21_0088 1195 | O22_0118 1196 | S8_0078 1197 | O10_0110 1198 | T8_0099 1199 | S1_0024 1200 | T6_0115 1201 | T7_0051 1202 | T3_0007 1203 | O6_0040 1204 | O20_0005 1205 | T1_0024 1206 | O4_0078 1207 | T5_0032 1208 | O9_0056 1209 | T5_0058 1210 | O20_0131 1211 | O16_0083 1212 | O14_0022 1213 | O23_0050 1214 | O6_0139 1215 | T7_0039 1216 | T5_0027 1217 | T2_0135 1218 | O12_0097 1219 | O17_0011 1220 | O5_0100 1221 | O9_0082 1222 | S6_0058 1223 | O14_0028 1224 | O16_0057 1225 | T3_0006 1226 | O26_0092 1227 | O23_0064 1228 | S4_0046 1229 | S8_0120 1230 | T6_0058 1231 | O5_0130 1232 | S5_0042 1233 | O17_0101 1234 | O22_0075 1235 | O20_0034 1236 | O24_0107 1237 | T3_0026 1238 | S1_0094 1239 | T6_0045 1240 | S5_0089 1241 | O2_0098 1242 | O26_0032 1243 | T7_0099 1244 | S7_0141 1245 | O15_0039 1246 | T4_0100 1247 | O14_0037 1248 | S5_0113 1249 | S6_0046 1250 | T7_0009 1251 | O8_0083 1252 | O21_0085 1253 | S6_0112 1254 | O11_0071 1255 | O20_0080 1256 | T7_0001 1257 | O25_0114 1258 | O21_0131 1259 | S5_0010 1260 | T7_0072 1261 | O11_0093 1262 | S5_0101 1263 | O26_0037 1264 | S8_0064 1265 | O11_0116 1266 | S3_0021 1267 | O26_0113 1268 | O23_0047 1269 | O15_0080 1270 | S7_0083 1271 | S6_0045 1272 | O15_0011 1273 | O5_0020 1274 | O18_0036 1275 | S3_0028 1276 | O15_0132 1277 | S2_0089 1278 | O6_0061 1279 | O4_0096 1280 | O6_0097 1281 | O13_0036 1282 | T1_0082 1283 | 
O12_0071 1284 | O10_0105 1285 | O23_0125 1286 | O6_0085 1287 | T7_0100 1288 | O17_0035 1289 | O10_0084 1290 | S1_0050 1291 | T5_0074 1292 | O23_0002 1293 | S5_0088 1294 | O3_0111 1295 | S2_0111 1296 | O15_0001 1297 | O9_0123 1298 | S3_0002 1299 | O25_0022 1300 | O24_0054 1301 | O2_0020 1302 | O11_0080 1303 | O22_0080 1304 | S2_0126 1305 | O24_0105 1306 | O24_0051 1307 | T7_0071 1308 | O2_0055 1309 | O17_0043 1310 | S5_0070 1311 | T5_0106 1312 | O20_0134 1313 | O4_0023 1314 | O19_0002 1315 | S7_0076 1316 | O2_0122 1317 | O23_0103 1318 | O26_0102 1319 | O2_0061 1320 | O18_0130 1321 | O14_0136 1322 | O6_0056 1323 | O15_0038 1324 | O15_0017 1325 | O6_0024 1326 | S5_0021 1327 | O20_0110 1328 | O21_0112 1329 | O25_0120 1330 | O7_0068 1331 | S4_0107 1332 | O10_0101 1333 | O1_0012 1334 | O21_0100 1335 | O18_0031 1336 | O6_0010 1337 | S8_0085 1338 | O12_0141 1339 | S7_0096 1340 | O15_0120 1341 | S8_0040 1342 | S6_0129 1343 | O11_0049 1344 | O8_0100 1345 | S2_0114 1346 | O22_0059 1347 | S7_0062 1348 | O8_0121 1349 | O20_0121 1350 | O14_0073 1351 | O9_0115 1352 | O23_0079 1353 | O6_0100 1354 | O4_0085 1355 | S8_0002 1356 | S1_0022 1357 | T7_0046 1358 | S5_0013 1359 | O4_0028 1360 | O20_0013 1361 | O23_0059 1362 | O12_0122 1363 | O15_0112 1364 | O13_0035 1365 | O4_0110 1366 | O8_0052 1367 | S4_0095 1368 | T8_0044 1369 | O3_0117 1370 | O17_0031 1371 | O1_0013 1372 | S3_0105 1373 | T4_0072 1374 | O14_0033 1375 | O15_0033 1376 | T7_0133 1377 | T2_0027 1378 | T8_0110 1379 | S7_0046 1380 | O18_0089 1381 | T5_0031 1382 | O2_0050 1383 | O2_0002 1384 | O21_0084 1385 | O21_0053 1386 | O24_0062 1387 | O9_0008 1388 | O1_0074 1389 | T5_0099 1390 | S7_0049 1391 | T5_0066 1392 | O26_0080 1393 | T6_0039 1394 | O17_0020 1395 | O15_0057 1396 | O17_0037 1397 | T1_0079 1398 | S3_0101 1399 | T7_0067 1400 | T4_0058 1401 | O9_0106 1402 | O15_0100 1403 | O4_0114 1404 | S5_0044 1405 | O17_0073 1406 | O25_0031 1407 | T4_0089 1408 | O16_0009 1409 | O22_0054 1410 | O5_0125 1411 | O9_0063 1412 | O14_0098 1413 | S2_0064 1414 | O9_0076 1415 | O26_0048 1416 | O10_0041 1417 | O9_0047 1418 | O5_0063 1419 | O20_0028 1420 | T4_0104 1421 | O9_0024 1422 | O21_0004 1423 | T1_0067 1424 | O16_0070 1425 | O21_0080 1426 | O15_0042 1427 | O19_0092 1428 | O13_0015 1429 | O20_0111 1430 | O18_0054 1431 | O9_0075 1432 | S2_0060 1433 | O9_0118 1434 | O24_0068 1435 | T8_0045 1436 | O13_0048 1437 | S7_0019 1438 | O21_0059 1439 | O16_0043 1440 | S3_0022 1441 | S4_0047 1442 | O1_0028 1443 | O7_0080 1444 | O4_0112 1445 | T2_0096 1446 | O20_0003 1447 | O1_0122 1448 | T1_0050 1449 | S6_0062 1450 | S1_0002 1451 | O20_0133 1452 | T6_0129 1453 | S1_0044 1454 | S7_0068 1455 | O25_0096 1456 | O5_0126 1457 | T1_0073 1458 | S1_0103 1459 | S1_0062 1460 | S6_0100 1461 | O5_0148 1462 | T5_0055 1463 | T2_0025 1464 | O8_0005 1465 | T7_0101 1466 | O14_0024 1467 | S5_0033 1468 | O11_0084 1469 | O13_0003 1470 | O6_0116 1471 | T5_0111 1472 | O5_0141 1473 | T7_0115 1474 | S2_0117 1475 | O10_0088 1476 | O4_0120 1477 | O1_0099 1478 | O14_0006 1479 | O17_0053 1480 | T1_0074 1481 | O15_0066 1482 | O26_0103 1483 | O14_0052 1484 | O1_0021 1485 | O17_0033 1486 | O25_0118 1487 | O7_0058 1488 | T8_0013 1489 | O2_0048 1490 | T1_0036 1491 | T6_0066 1492 | S6_0081 1493 | O6_0029 1494 | O21_0077 1495 | O19_0007 1496 | O14_0119 1497 | O9_0067 1498 | O5_0001 1499 | O21_0110 1500 | T5_0109 1501 | O17_0085 1502 | O18_0066 1503 | O23_0109 1504 | O5_0048 1505 | T7_0011 1506 | T7_0141 1507 | T8_0097 1508 | O7_0103 1509 | O14_0038 1510 | T4_0101 1511 | O6_0080 1512 | S3_0102 1513 | O8_0125 
1514 | O18_0020 1515 | O25_0066 1516 | O8_0081 1517 | O3_0139 1518 | T2_0037 1519 | O22_0028 1520 | O18_0062 1521 | T4_0087 1522 | T6_0041 1523 | O18_0021 1524 | S3_0113 1525 | T6_0069 1526 | O14_0084 1527 | T7_0052 1528 | O19_0095 1529 | S1_0028 1530 | O22_0078 1531 | O25_0106 1532 | S6_0047 1533 | O14_0040 1534 | O2_0090 1535 | O13_0071 1536 | O26_0043 1537 | S5_0037 1538 | O5_0021 1539 | S5_0053 1540 | O12_0040 1541 | O17_0077 1542 | O10_0029 1543 | S7_0021 1544 | O12_0062 1545 | S7_0051 1546 | O19_0009 1547 | O6_0002 1548 | O9_0009 1549 | O23_0121 1550 | O1_0035 1551 | O3_0140 1552 | S2_0128 1553 | O7_0098 1554 | S7_0093 1555 | O13_0118 1556 | T4_0088 1557 | T1_0060 1558 | T5_0068 1559 | O8_0041 1560 | O22_0061 1561 | T6_0030 1562 | S3_0091 1563 | O11_0059 1564 | O7_0094 1565 | O12_0082 1566 | S8_0067 1567 | O6_0138 1568 | O23_0010 1569 | O14_0012 1570 | O25_0078 1571 | O21_0034 1572 | O1_0121 1573 | O26_0128 1574 | O2_0107 1575 | O13_0060 1576 | O20_0056 1577 | S5_0059 1578 | O9_0020 1579 | S2_0127 1580 | O17_0016 1581 | S5_0025 1582 | O17_0118 1583 | O8_0085 1584 | O25_0112 1585 | S6_0051 1586 | O25_0121 1587 | O15_0054 1588 | S7_0128 1589 | S3_0027 1590 | O1_0034 1591 | O24_0130 1592 | O20_0046 1593 | O17_0088 1594 | O16_0056 1595 | O12_0119 1596 | O19_0010 1597 | S7_0095 1598 | O8_0053 1599 | T7_0113 1600 | T7_0066 1601 | S8_0069 1602 | O9_0064 1603 | S6_0093 1604 | O2_0072 1605 | O22_0106 1606 | O15_0006 1607 | O12_0045 1608 | O26_0093 1609 | S5_0066 1610 | O2_0111 1611 | O18_0023 1612 | S8_0027 1613 | T3_0118 1614 | S7_0029 1615 | O5_0113 1616 | O9_0025 1617 | O2_0126 1618 | O11_0048 1619 | O15_0082 1620 | O21_0002 1621 | S4_0106 1622 | O5_0117 1623 | S1_0049 1624 | O6_0058 1625 | O23_0027 1626 | S6_0077 1627 | S2_0047 1628 | T4_0105 1629 | O10_0085 1630 | O20_0068 1631 | O15_0041 1632 | O4_0127 1633 | T5_0042 1634 | O23_0084 1635 | O20_0122 1636 | O11_0047 1637 | O12_0124 1638 | T1_0081 1639 | O9_0069 1640 | S7_0080 1641 | T7_0003 1642 | S8_0115 1643 | O13_0125 1644 | O3_0031 1645 | O21_0003 1646 | S3_0026 1647 | O3_0034 1648 | O12_0132 1649 | O18_0019 1650 | O20_0053 1651 | T2_0095 1652 | O24_0064 1653 | O25_0123 1654 | S3_0059 1655 | S7_0097 1656 | O26_0058 1657 | T1_0041 1658 | O15_0096 1659 | O24_0066 1660 | S1_0104 1661 | O6_0023 1662 | O16_0052 1663 | O8_0020 1664 | O13_0045 1665 | S8_0071 1666 | S3_0104 1667 | O2_0059 1668 | O7_0121 1669 | O24_0076 1670 | T1_0140 1671 | O21_0057 1672 | O18_0128 1673 | O21_0043 1674 | O7_0025 1675 | T1_0104 1676 | S2_0008 1677 | T4_0024 1678 | O9_0092 1679 | T5_0024 1680 | T4_0044 1681 | O24_0074 1682 | T2_0039 1683 | S8_0001 1684 | T8_0086 1685 | T7_0111 1686 | O5_0081 1687 | O17_0116 1688 | S7_0081 1689 | O16_0076 1690 | O24_0104 1691 | T6_0042 1692 | O16_0061 1693 | T3_0092 1694 | S5_0118 1695 | O13_0130 1696 | O16_0029 1697 | T4_0080 1698 | O19_0019 1699 | O13_0011 1700 | O17_0062 1701 | O25_0084 1702 | O12_0052 1703 | O8_0064 1704 | O25_0055 1705 | S3_0093 1706 | O24_0119 1707 | O21_0064 1708 | O14_0010 1709 | T4_0077 1710 | O6_0011 1711 | T4_0076 1712 | O18_0088 1713 | O20_0041 1714 | S7_0012 1715 | O20_0004 1716 | O26_0117 1717 | O17_0019 1718 | O21_0098 1719 | O9_0104 1720 | T5_0047 1721 | O15_0009 1722 | O17_0086 1723 | O8_0123 1724 | O12_0084 1725 | O16_0027 1726 | S4_0117 1727 | O1_0006 1728 | T8_0129 1729 | O4_0100 1730 | O4_0108 1731 | T8_0119 1732 | O26_0118 1733 | S6_0036 1734 | O6_0068 1735 | O3_0113 1736 | O18_0104 1737 | O7_0030 1738 | T4_0045 1739 | S8_0056 1740 | T1_0099 1741 | T1_0026 1742 | O14_0039 1743 | O26_0040 
1744 | O14_0112 1745 | T5_0005 1746 | O24_0052 1747 | O9_0112 1748 | S6_0102 1749 | T8_0025 1750 | O2_0068 1751 | O25_0004 1752 | S1_0008 1753 | O8_0086 1754 | T6_0068 1755 | O22_0069 1756 | O13_0123 1757 | O12_0020 1758 | T3_0114 1759 | O18_0114 1760 | T4_0117 1761 | O19_0106 1762 | S5_0126 1763 | T5_0077 1764 | T6_0087 1765 | O26_0023 1766 | O20_0023 1767 | T2_0026 1768 | O1_0010 1769 | O6_0022 1770 | O5_0025 1771 | O5_0128 1772 | O8_0029 1773 | O8_0136 1774 | T6_0099 1775 | S3_0060 1776 | T6_0038 1777 | S1_0110 1778 | S1_0128 1779 | O12_0089 1780 | O23_0083 1781 | T3_0127 1782 | T1_0049 1783 | O10_0079 1784 | O23_0104 1785 | O1_0097 1786 | O25_0076 1787 | O25_0126 1788 | O14_0120 1789 | O8_0010 1790 | O14_0065 1791 | O3_0122 1792 | S1_0025 1793 | O14_0056 1794 | S1_0108 1795 | T5_0100 1796 | O24_0042 1797 | T1_0092 1798 | O11_0043 1799 | T1_0100 1800 | O17_0061 1801 | O25_0128 1802 | O18_0033 1803 | O10_0050 1804 | O10_0083 1805 | O22_0068 1806 | S7_0018 1807 | O7_0129 1808 | O22_0116 1809 | T8_0067 1810 | O6_0132 1811 | O5_0078 1812 | O18_0118 1813 | O9_0050 1814 | O7_0033 1815 | O13_0107 1816 | O2_0099 1817 | O12_0077 1818 | O21_0045 1819 | S6_0029 1820 | O21_0022 1821 | O16_0046 1822 | O17_0126 1823 | S1_0081 1824 | S1_0120 1825 | S5_0055 1826 | O6_0136 1827 | O18_0038 1828 | S5_0045 1829 | O3_0002 1830 | T6_0011 1831 | S6_0074 1832 | O19_0088 1833 | O6_0133 1834 | O23_0085 1835 | O4_0126 1836 | S1_0117 1837 | O10_0100 1838 | T5_0101 1839 | T7_0050 1840 | O17_0041 1841 | S2_0018 1842 | O16_0106 1843 | S8_0052 1844 | S3_0073 1845 | S4_0044 1846 | O13_0128 1847 | O4_0054 1848 | O17_0054 1849 | O3_0042 1850 | O12_0114 1851 | O2_0095 1852 | T8_0035 1853 | O12_0083 1854 | T5_0011 1855 | O15_0012 1856 | O8_0087 1857 | O20_0105 1858 | S8_0118 1859 | O14_0057 1860 | O22_0063 1861 | O7_0084 1862 | O21_0001 1863 | O15_0055 1864 | O24_0014 1865 | T4_0057 1866 | S1_0026 1867 | O25_0104 1868 | O3_0110 1869 | S4_0083 1870 | O3_0132 1871 | S1_0033 1872 | T6_0086 1873 | S6_0034 1874 | O5_0022 1875 | S7_0139 1876 | S1_0079 1877 | O4_0097 1878 | T6_0020 1879 | S4_0116 1880 | O26_0114 1881 | O3_0089 1882 | O15_0002 1883 | S7_0075 1884 | O2_0004 1885 | T4_0113 1886 | T7_0144 1887 | O8_0119 1888 | O2_0069 1889 | S3_0083 1890 | O18_0049 1891 | O9_0036 1892 | O5_0074 1893 | O12_0102 1894 | O23_0042 1895 | T3_0122 1896 | O14_0011 1897 | O8_0144 1898 | O11_0079 1899 | O15_0107 1900 | T7_0049 1901 | T4_0085 1902 | O4_0029 1903 | T1_0038 1904 | O21_0014 1905 | O2_0057 1906 | O3_0125 1907 | S1_0114 1908 | O6_0101 1909 | S6_0040 1910 | O1_0098 1911 | S2_0052 1912 | S3_0082 1913 | O6_0102 1914 | O5_0013 1915 | O17_0013 1916 | T5_0054 1917 | S5_0099 1918 | O4_0012 1919 | O3_0041 1920 | O5_0094 1921 | S1_0042 1922 | T3_0089 1923 | S4_0035 1924 | O18_0103 1925 | O21_0068 1926 | O3_0019 1927 | T1_0094 1928 | O11_0057 1929 | O14_0110 1930 | O14_0069 1931 | O22_0086 1932 | O13_0104 1933 | T2_0094 1934 | O15_0052 1935 | S4_0069 1936 | O25_0119 1937 | T6_0123 1938 | O13_0129 1939 | O4_0117 1940 | O2_0028 1941 | O7_0027 1942 | O21_0009 1943 | O9_0054 1944 | O8_0009 1945 | O9_0110 1946 | T5_0012 1947 | S3_0057 1948 | O12_0139 1949 | O26_0010 1950 | O3_0131 1951 | O1_0023 1952 | T7_0086 1953 | O12_0043 1954 | T2_0125 1955 | O12_0044 1956 | O17_0107 1957 | T3_0104 1958 | T4_0075 1959 | T7_0058 1960 | S5_0016 1961 | O9_0062 1962 | O11_0082 1963 | O14_0058 1964 | T2_0081 1965 | O3_0120 1966 | T3_0090 1967 | S1_0023 1968 | O7_0034 1969 | O20_0011 1970 | S5_0065 1971 | S7_0109 1972 | T1_0087 1973 | O13_0084 1974 | T5_0112 1975 | 
O1_0132 1976 | O26_0031 1977 | O19_0074 1978 | T3_0117 1979 | O6_0114 1980 | O19_0067 1981 | O23_0076 1982 | T7_0027 1983 | O12_0015 1984 | O20_0095 1985 | O26_0026 1986 | T6_0056 1987 | O21_0012 1988 | T7_0021 1989 | O11_0081 1990 | O18_0116 1991 | O8_0051 1992 | S3_0023 1993 | T6_0103 1994 | O26_0079 1995 | S1_0066 1996 | O20_0108 1997 | S5_0024 1998 | O15_0021 1999 | S5_0074 2000 | T4_0086 2001 | O8_0137 2002 | O24_0044 2003 | S8_0096 2004 | S6_0024 2005 | O1_0011 2006 | T7_0142 2007 | O23_0038 2008 | O19_0100 2009 | T3_0005 2010 | T3_0101 2011 | O7_0123 2012 | S3_0049 2013 | O24_0067 2014 | O9_0130 2015 | S7_0024 2016 | O24_0031 2017 | S4_0068 2018 | O14_0061 2019 | O21_0010 2020 | O22_0062 2021 | T1_0135 2022 | T3_0137 2023 | O12_0035 2024 | O23_0009 2025 | O23_0126 2026 | O25_0009 2027 | O21_0056 2028 | O4_0017 2029 | O26_0129 2030 | S6_0031 2031 | O26_0044 2032 | O13_0047 2033 | T8_0068 2034 | S1_0009 2035 | O24_0055 2036 | O18_0070 2037 | O3_0008 2038 | T5_0013 2039 | O2_0091 2040 | T8_0132 2041 | O17_0111 2042 | O17_0015 2043 | T2_0070 2044 | O11_0083 2045 | O21_0083 2046 | O7_0117 2047 | O20_0055 2048 | S7_0050 2049 | T1_0133 2050 | S5_0054 2051 | O26_0101 2052 | T6_0120 2053 | O21_0026 2054 | O22_0070 2055 | S4_0105 2056 | T6_0111 2057 | O18_0022 2058 | O5_0015 2059 | O5_0112 2060 | O16_0034 2061 | S1_0107 2062 | O15_0081 2063 | O26_0112 2064 | O7_0106 2065 | S2_0100 2066 | O25_0005 2067 | O2_0102 2068 | T7_0019 2069 | O20_0061 2070 | T6_0110 2071 | O22_0122 2072 | O20_0029 2073 | T8_0073 2074 | S4_0034 2075 | S7_0110 2076 | T7_0038 2077 | O14_0124 2078 | S6_0057 2079 | O16_0044 2080 | T6_0022 2081 | O18_0127 2082 | S4_0129 2083 | O25_0117 2084 | O23_0131 2085 | O20_0008 2086 | O24_0035 2087 | S6_0117 2088 | T7_0137 2089 | T5_0026 2090 | T2_0057 2091 | O12_0076 2092 | S3_0039 2093 | O24_0040 2094 | O10_0023 2095 | O23_0071 2096 | T7_0138 2097 | S4_0094 2098 | T3_0025 2099 | T1_0006 2100 | O3_0054 2101 | S8_0013 2102 | S8_0089 2103 | O16_0023 2104 | O8_0032 2105 | O8_0004 2106 | O17_0117 2107 | T5_0067 2108 | O15_0032 2109 | O6_0044 2110 | S8_0079 2111 | O10_0013 2112 | T8_0115 2113 | O23_0003 2114 | O11_0105 2115 | O21_0095 2116 | O9_0099 2117 | O9_0107 2118 | O15_0121 2119 | S5_0117 2120 | O25_0079 2121 | O25_0094 2122 | O21_0028 2123 | S3_0009 2124 | O4_0045 2125 | O19_0006 2126 | O8_0134 2127 | O17_0080 2128 | O6_0077 2129 | T6_0119 2130 | O15_0044 2131 | O1_0087 2132 | T3_0105 2133 | O23_0066 2134 | T8_0127 2135 | S4_0032 2136 | T5_0025 2137 | T3_0102 2138 | O22_0066 2139 | T2_0067 2140 | O23_0053 2141 | S2_0076 2142 | O6_0104 2143 | T6_0035 2144 | O25_0032 2145 | T1_0059 2146 | S6_0039 2147 | S8_0091 2148 | O20_0097 2149 | O8_0122 2150 | S7_0084 2151 | O3_0102 2152 | O14_0083 2153 | O15_0068 2154 | O4_0095 2155 | O5_0077 2156 | T6_0036 2157 | O23_0068 2158 | O15_0019 2159 | O15_0030 2160 | O20_0084 2161 | O12_0098 2162 | O3_0118 2163 | O11_0031 2164 | S2_0108 2165 | O25_0072 2166 | O1_0003 2167 | S3_0062 2168 | S3_0058 2169 | O4_0088 2170 | O1_0073 2171 | O23_0080 2172 | S3_0089 2173 | T7_0070 2174 | O4_0005 2175 | O3_0128 2176 | O16_0118 2177 | O22_0026 2178 | O18_0086 2179 | T8_0034 2180 | O4_0079 2181 | O4_0115 2182 | T4_0022 2183 | S8_0097 2184 | T5_0072 2185 | T2_0124 2186 | O11_0092 2187 | O10_0114 2188 | O10_0087 2189 | O13_0027 2190 | T5_0043 2191 | O26_0020 2192 | O19_0108 2193 | O2_0127 2194 | O12_0023 2195 | S3_0041 2196 | O25_0091 2197 | O25_0042 2198 | S3_0070 2199 | O2_0113 2200 | S2_0099 2201 | O21_0119 2202 | O2_0087 2203 | O2_0092 2204 | O19_0094 2205 | 
T4_0046 2206 | O13_0016 2207 | O5_0098 2208 | O20_0022 2209 | O4_0003 2210 | T2_0068 2211 | S3_0020 2212 | T1_0003 2213 | S8_0005 2214 | O17_0021 2215 | O24_0127 2216 | S6_0061 2217 | S8_0076 2218 | O13_0109 2219 | O20_0096 2220 | O12_0086 2221 | O14_0068 2222 | O12_0057 2223 | O7_0111 2224 | O24_0128 2225 | O12_0031 2226 | O24_0117 2227 | O2_0117 2228 | O21_0130 2229 | T2_0143 2230 | S1_0105 2231 | O13_0069 2232 | O6_0046 2233 | O13_0081 2234 | O7_0064 2235 | O3_0006 2236 | O24_0063 2237 | T7_0132 2238 | O21_0092 2239 | O4_0098 2240 | O17_0076 2241 | O11_0028 2242 | O7_0059 2243 | O14_0076 2244 | O11_0069 2245 | T2_0083 2246 | O14_0013 2247 | S1_0119 2248 | T8_0070 2249 | O2_0060 2250 | S3_0030 2251 | O13_0026 2252 | O20_0083 2253 | O11_0038 2254 | S5_0079 2255 | S1_0115 2256 | T1_0101 2257 | O10_0089 2258 | S2_0075 2259 | O14_0074 2260 | O17_0023 2261 | O20_0020 2262 | S3_0080 2263 | O2_0103 2264 | O9_0089 2265 | S6_0127 2266 | S1_0034 2267 | O3_0129 2268 | O16_0036 2269 | S6_0035 2270 | O4_0122 2271 | S7_0098 2272 | O13_0091 2273 | S7_0042 2274 | O15_0109 2275 | O19_0032 2276 | O26_0012 2277 | O16_0022 2278 | T4_0068 2279 | T2_0141 2280 | O17_0090 2281 | O14_0125 2282 | S6_0118 2283 | O20_0073 2284 | T7_0045 2285 | O17_0030 2286 | S5_0100 2287 | T1_0037 2288 | S1_0047 2289 | O10_0060 2290 | O13_0106 2291 | O16_0028 2292 | O20_0124 2293 | S6_0075 2294 | O11_0087 2295 | T5_0016 2296 | O15_0097 2297 | T4_0073 2298 | O21_0120 2299 | O4_0070 2300 | O6_0094 2301 | O12_0022 2302 | O20_0054 2303 | S7_0058 2304 | S1_0078 2305 | O7_0028 2306 | S3_0047 2307 | O22_0019 2308 | O23_0130 2309 | O10_0075 2310 | T7_0139 2311 | O6_0004 2312 | S7_0142 2313 | S2_0107 2314 | S7_0127 2315 | T7_0034 2316 | S2_0088 2317 | O13_0105 2318 | O20_0075 2319 | O5_0060 2320 | T1_0054 2321 | O20_0002 2322 | O18_0091 2323 | S2_0068 2324 | S1_0036 2325 | S8_0030 2326 | T1_0134 2327 | S5_0009 2328 | S2_0045 2329 | O2_0070 2330 | T2_0061 2331 | T8_0121 2332 | S8_0130 2333 | T8_0058 2334 | O24_0032 2335 | S7_0069 2336 | O16_0082 2337 | O13_0002 2338 | T1_0080 2339 | O23_0116 2340 | O11_0104 2341 | S2_0087 2342 | S6_0030 2343 | O11_0102 2344 | S8_0022 2345 | T8_0087 2346 | O11_0042 2347 | T6_0080 2348 | O13_0116 2349 | O18_0032 2350 | T7_0002 2351 | S3_0055 2352 | O2_0021 2353 | O23_0077 2354 | O4_0116 2355 | O10_0116 2356 | O22_0060 2357 | S2_0040 2358 | T2_0140 2359 | O5_0142 2360 | O9_0120 2361 | T1_0083 2362 | O18_0107 2363 | T2_0136 2364 | T5_0038 2365 | O17_0115 2366 | T7_0024 2367 | O5_0102 2368 | S2_0118 2369 | S3_0128 2370 | T1_0066 2371 | O7_0086 2372 | O3_0108 2373 | T3_0126 2374 | O15_0043 2375 | T7_0088 2376 | S7_0111 2377 | O6_0009 2378 | O16_0117 2379 | O6_0131 2380 | O18_0073 2381 | O25_0060 2382 | O17_0079 2383 | O13_0023 2384 | O12_0135 2385 | O5_0129 2386 | T2_0063 2387 | S4_0033 2388 | O26_0067 2389 | O15_0099 2390 | O9_0093 2391 | O12_0085 2392 | O22_0114 2393 | O23_0078 2394 | O17_0024 2395 | T1_0093 2396 | S7_0067 2397 | O6_0033 2398 | O21_0109 2399 | T7_0112 2400 | S8_0068 2401 | O5_0008 2402 | O25_0093 2403 | O18_0085 2404 | O2_0023 2405 | O10_0062 2406 | T8_0075 2407 | T7_0122 2408 | S4_0059 2409 | O3_0005 2410 | O20_0035 2411 | O22_0039 2412 | S7_0023 2413 | O8_0028 2414 | O6_0074 2415 | O19_0113 2416 | O11_0023 2417 | O24_0039 2418 | O10_0034 2419 | T3_0013 2420 | S2_0101 2421 | O10_0048 2422 | T2_0062 2423 | O7_0038 2424 | S2_0048 2425 | O14_0060 2426 | O2_0094 2427 | O2_0119 2428 | O20_0119 2429 | T7_0120 2430 | O19_0075 2431 | O9_0055 2432 | T3_0115 2433 | O21_0041 2434 | S3_0112 2435 | 
O15_0013 2436 | S6_0028 2437 | O16_0014 2438 | O15_0094 2439 | O15_0064 2440 | S1_0067 2441 | O13_0083 2442 | S5_0116 2443 | O18_0101 2444 | O7_0093 2445 | O18_0050 2446 | S8_0014 2447 | O24_0129 2448 | O13_0072 2449 | O21_0011 2450 | O16_0107 2451 | O23_0021 2452 | O17_0124 2453 | O16_0026 2454 | O15_0065 2455 | O20_0130 2456 | T5_0037 2457 | O2_0010 2458 | S1_0082 2459 | S6_0088 2460 | -------------------------------------------------------------------------------- /txts/val.txt: -------------------------------------------------------------------------------- 1 | S8_0125 2 | O6_0121 3 | O5_0135 4 | T7_0065 5 | S4_0097 6 | O25_0044 7 | O4_0090 8 | O19_0052 9 | O11_0035 10 | S2_0077 11 | O26_0056 12 | T6_0075 13 | T5_0127 14 | T1_0095 15 | O19_0136 16 | O25_0113 17 | O6_0072 18 | O13_0065 19 | O25_0105 20 | O24_0007 21 | S3_0090 22 | T1_0058 23 | O1_0067 24 | O20_0066 25 | O6_0012 26 | O24_0018 27 | O18_0053 28 | O1_0071 29 | O13_0057 30 | O13_0022 31 | O25_0124 32 | O19_0128 33 | O14_0035 34 | T8_0096 35 | O1_0079 36 | S3_0025 37 | O21_0070 38 | O2_0035 39 | T2_0012 40 | O2_0120 41 | O18_0061 42 | O8_0017 43 | S6_0095 44 | O6_0115 45 | S5_0061 46 | O8_0084 47 | T6_0008 48 | S2_0095 49 | T2_0036 50 | S7_0065 51 | O19_0131 52 | O24_0069 53 | O22_0015 54 | S5_0048 55 | O4_0109 56 | O7_0082 57 | O2_0043 58 | O4_0132 59 | O9_0030 60 | S7_0122 61 | O22_0127 62 | T3_0080 63 | O19_0028 64 | O5_0120 65 | T1_0069 66 | S7_0066 67 | O11_0114 68 | T7_0036 69 | S1_0016 70 | T2_0076 71 | S1_0071 72 | T6_0094 73 | O14_0115 74 | O18_0056 75 | S5_0058 76 | T5_0076 77 | O10_0067 78 | O17_0096 79 | O13_0087 80 | O4_0102 81 | O11_0054 82 | S1_0061 83 | O6_0028 84 | O15_0025 85 | O24_0095 86 | T6_0007 87 | S8_0104 88 | O18_0034 89 | O3_0012 90 | O19_0079 91 | T4_0090 92 | O3_0107 93 | O24_0103 94 | T8_0055 95 | S5_0111 96 | T5_0080 97 | O15_0045 98 | O23_0091 99 | T2_0118 100 | O12_0013 101 | S2_0116 102 | T4_0107 103 | T2_0034 104 | T4_0043 105 | O12_0104 106 | T7_0103 107 | T1_0113 108 | O15_0015 109 | O17_0056 110 | T6_0122 111 | O4_0026 112 | O3_0023 113 | O25_0075 114 | O1_0118 115 | O14_0077 116 | S3_0019 117 | O7_0004 118 | O10_0098 119 | O6_0055 120 | O15_0098 121 | O14_0147 122 | O8_0006 123 | T2_0046 124 | S7_0071 125 | O2_0039 126 | O15_0122 127 | O26_0116 128 | O17_0089 129 | T1_0040 130 | O15_0117 131 | O6_0066 132 | O5_0105 133 | O16_0115 134 | O7_0067 135 | S6_0113 136 | T2_0007 137 | O2_0063 138 | O21_0040 139 | T8_0090 140 | O23_0020 141 | S6_0106 142 | O14_0117 143 | T7_0043 144 | O3_0033 145 | O7_0001 146 | O6_0128 147 | T6_0132 148 | O23_0093 149 | S4_0056 150 | S8_0121 151 | O26_0121 152 | O1_0095 153 | O3_0017 154 | S6_0054 155 | S4_0019 156 | O8_0113 157 | T3_0052 158 | S3_0050 159 | S1_0080 160 | S4_0104 161 | O7_0116 162 | O15_0020 163 | T3_0128 164 | S4_0041 165 | O21_0087 166 | O3_0077 167 | T1_0020 168 | O20_0064 169 | O25_0077 170 | O21_0025 171 | T7_0082 172 | O9_0100 173 | O12_0123 174 | T5_0105 175 | O13_0097 176 | O6_0030 177 | T2_0075 178 | O7_0048 179 | O26_0105 180 | O4_0038 181 | O12_0127 182 | O23_0108 183 | O21_0032 184 | O5_0137 185 | O1_0042 186 | O19_0043 187 | O16_0042 188 | T4_0126 189 | O14_0131 190 | O16_0053 191 | O16_0063 192 | O11_0027 193 | S6_0086 194 | S4_0025 195 | T1_0005 196 | O4_0016 197 | O3_0028 198 | O2_0110 199 | T4_0118 200 | S1_0086 201 | T6_0116 202 | O19_0086 203 | O16_0049 204 | O26_0073 205 | S8_0060 206 | S2_0113 207 | S2_0059 208 | O8_0128 209 | S3_0069 210 | O17_0067 211 | S1_0112 212 | O5_0139 213 | T6_0109 214 | O16_0102 215 | O9_0010 216 | 
S2_0044 217 | S5_0060 218 | T5_0041 219 | O13_0038 220 | S8_0108 221 | O8_0076 222 | O18_0129 223 | O25_0030 224 | S6_0020 225 | O11_0045 226 | O23_0049 227 | O24_0082 228 | O16_0093 229 | O22_0081 230 | O14_0044 231 | S4_0027 232 | O24_0026 233 | O12_0046 234 | S5_0094 235 | S7_0063 236 | O5_0069 237 | O8_0046 238 | T4_0082 239 | T8_0043 240 | O11_0022 241 | O4_0103 242 | O9_0066 243 | O22_0118 244 | S3_0075 245 | O9_0072 246 | O6_0139 247 | O16_0130 248 | O26_0084 249 | T2_0135 250 | O20_0107 251 | O14_0037 252 | O22_0023 253 | O5_0003 254 | S2_0078 255 | S5_0010 256 | O26_0037 257 | O18_0036 258 | S7_0061 259 | O5_0076 260 | O10_0122 261 | O10_0084 262 | O23_0034 263 | O16_0089 264 | O6_0056 265 | O25_0024 266 | T7_0092 267 | O21_0030 268 | T7_0046 269 | O15_0112 270 | O26_0055 271 | O23_0127 272 | O14_0033 273 | O12_0073 274 | T8_0050 275 | S3_0035 276 | S3_0007 277 | T4_0012 278 | O14_0133 279 | O17_0020 280 | S5_0023 281 | O21_0039 282 | O8_0127 283 | S8_0020 284 | O20_0026 285 | O21_0004 286 | O17_0006 287 | S2_0072 288 | O5_0046 289 | O7_0080 290 | S8_0061 291 | O1_0055 292 | O1_0101 293 | O20_0133 294 | T2_0113 295 | S6_0026 296 | O25_0096 297 | T1_0073 298 | T7_0101 299 | S6_0042 300 | S8_0124 301 | O26_0103 302 | T8_0013 303 | T2_0015 304 | O11_0074 305 | O22_0078 306 | S6_0125 307 | O12_0062 308 | S7_0051 309 | O26_0127 310 | T1_0060 311 | S7_0121 312 | T3_0024 313 | T3_0023 314 | T3_0120 315 | O2_0107 316 | O13_0060 317 | O9_0074 318 | S6_0012 319 | O17_0088 320 | S5_0082 321 | O7_0036 322 | O13_0075 323 | O18_0023 324 | O9_0025 325 | O4_0127 326 | T5_0042 327 | O8_0079 328 | T6_0064 329 | O12_0124 330 | S7_0079 331 | T8_0064 332 | T4_0038 333 | O24_0066 334 | O26_0016 335 | O10_0080 336 | O3_0098 337 | O7_0121 338 | T1_0140 339 | T2_0071 340 | S8_0106 341 | O17_0116 342 | O20_0100 343 | O16_0076 344 | O21_0018 345 | O25_0084 346 | S7_0040 347 | O11_0066 348 | O1_0015 349 | O7_0030 350 | T3_0003 351 | S8_0003 352 | O25_0088 353 | S2_0123 354 | O9_0112 355 | O10_0051 356 | T3_0132 357 | T5_0078 358 | O19_0106 359 | O26_0023 360 | S1_0012 361 | T4_0019 362 | O3_0039 363 | T8_0009 364 | O21_0079 365 | O10_0059 366 | T3_0127 367 | O4_0064 368 | O6_0019 369 | O20_0132 370 | O9_0113 371 | O12_0049 372 | O21_0045 373 | O8_0018 374 | O10_0021 375 | O16_0046 376 | S1_0081 377 | S1_0120 378 | O22_0022 379 | O15_0093 380 | S1_0117 381 | O17_0041 382 | O17_0054 383 | O13_0096 384 | T5_0011 385 | O14_0057 386 | T8_0012 387 | O24_0014 388 | O19_0064 389 | O20_0091 390 | S6_0034 391 | O18_0063 392 | T1_0118 393 | S3_0108 394 | T7_0030 395 | T4_0113 396 | O11_0123 397 | T7_0049 398 | O10_0045 399 | O24_0122 400 | T1_0012 401 | T5_0054 402 | O4_0012 403 | O8_0071 404 | T7_0102 405 | O15_0052 406 | O2_0121 407 | S7_0126 408 | O20_0116 409 | S4_0016 410 | O5_0084 411 | O18_0060 412 | O9_0054 413 | S2_0038 414 | S3_0057 415 | O5_0071 416 | T4_0075 417 | T7_0105 418 | O22_0128 419 | T1_0132 420 | O20_0011 421 | O13_0084 422 | S1_0077 423 | T7_0027 424 | O11_0009 425 | O1_0105 426 | T3_0019 427 | T3_0066 428 | O23_0090 429 | O21_0010 430 | O3_0001 431 | O13_0047 432 | O18_0027 433 | S3_0118 434 | O10_0065 435 | O8_0014 436 | O20_0125 437 | O15_0081 438 | O19_0096 439 | T6_0110 440 | O22_0122 441 | O16_0044 442 | O24_0035 443 | O26_0091 444 | O10_0033 445 | S5_0117 446 | O8_0049 447 | O7_0127 448 | O17_0080 449 | T3_0012 450 | S4_0032 451 | O9_0086 452 | O6_0104 453 | S3_0124 454 | O22_0043 455 | O12_0009 456 | O11_0025 457 | O15_0019 458 | O1_0078 459 | S2_0108 460 | O7_0013 461 | O20_0062 462 | 
O3_0128 463 | O22_0044 464 | O4_0079 465 | T5_0001 466 | T5_0043 467 | O19_0108 468 | O14_0072 469 | O2_0087 470 | T8_0122 471 | S8_0005 472 | O12_0086 473 | S1_0055 474 | O18_0016 475 | T4_0014 476 | O14_0135 477 | O2_0103 478 | S4_0100 479 | O9_0052 480 | O5_0047 481 | O17_0030 482 | O10_0060 483 | O11_0087 484 | T2_0137 485 | S5_0096 486 | S7_0013 487 | O23_0120 488 | T6_0033 489 | S7_0113 490 | O5_0037 491 | O3_0093 492 | S6_0030 493 | S8_0073 494 | T6_0101 495 | O8_0139 496 | T8_0082 497 | O22_0011 498 | O12_0055 499 | O23_0099 500 | O5_0008 501 | O18_0085 502 | O12_0065 503 | O20_0035 504 | T3_0013 505 | O7_0038 506 | O2_0037 507 | S5_0041 508 | O2_0064 509 | O13_0083 510 | S4_0077 511 | O23_0021 512 | O24_0029 513 | O5_0054 514 |
--------------------------------------------------------------------------------
/utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import random
3 | 
4 | import numpy as np
5 | import torch
6 | 
7 | COLOR_LIST = [
8 |     [255, 255, 255], [190, 210, 255], [0, 255, 197], [38, 115, 0],
9 |     [163, 255, 115], [76, 230, 0], [85, 255, 0], [115, 115, 0],
10 |     [168, 168, 0], [255, 255, 0], [115, 178, 255], [0, 92, 230],
11 |     [0, 38, 115], [122, 142, 245], [0, 168, 132], [115, 0, 0],
12 |     [255, 127, 127], [255, 190, 190], [255, 190, 232], [255, 0, 197],
13 |     [230, 0, 169], [168, 0, 132], [115, 0, 76], [255, 115, 223],
14 |     [161, 161, 161]
15 | ]
16 | 
17 | COLOR_LIST_BGR = [color[::-1] for color in COLOR_LIST]
18 | 
19 | 
20 | def map_to_color(img, label=None):
21 |     img = img.astype(np.int8) + 1
22 |     img_color = np.take(COLOR_LIST_BGR, img, axis=0, mode='clip')
23 | 
24 |     if label is not None:
25 |         img_color[label == -1] = COLOR_LIST_BGR[0]
26 |     else:
27 |         img_color[img == 0] = COLOR_LIST_BGR[0]
28 | 
29 |     return img_color
30 | 
31 | 
32 | def set_seed(seed):
33 |     random.seed(seed)
34 |     os.environ["PYTHONHASHSEED"] = str(seed)
35 |     np.random.seed(seed)
36 |     torch.cuda.manual_seed(seed)
37 |     torch.cuda.manual_seed_all(seed)
38 |     torch.backends.cudnn.deterministic = True
39 |     torch.backends.cudnn.benchmark = False
40 |     torch.backends.cudnn.enabled = True
41 |     torch.manual_seed(seed)
42 | 
43 | 
44 | def gen_confusion_matrix(num_class, img_predict, img_label):
45 |     mask = (img_label != -1)
46 |     label = num_class * img_label[mask] + img_predict[mask]
47 |     count = torch.bincount(label, minlength=num_class ** 2)
48 |     confusion_matrix = count.reshape(num_class, num_class)
49 |     return confusion_matrix
50 | 
51 | 
52 | def eval_metrics(confusion_matrix, mode='ts'):
53 |     eps = 1e-7
54 | 
55 |     unique_index = np.where(np.sum(confusion_matrix, axis=1) != 0)[0]
56 |     confusion_matrix = confusion_matrix[unique_index, :]
57 |     confusion_matrix = confusion_matrix[:, unique_index]
58 | 
59 |     a = np.diag(confusion_matrix)
60 |     b = np.sum(confusion_matrix, axis=0)
61 |     c = np.sum(confusion_matrix, axis=1)
62 | 
63 |     pa = a / (c + eps)
64 |     ua = a / (b + eps)
65 |     f1 = 2 * pa * ua / (pa + ua + eps)
66 |     mean_f1 = np.nanmean(f1)
67 | 
68 |     oa = np.sum(a) / np.sum(confusion_matrix)
69 | 
70 |     pe = np.sum(b * c) / (np.sum(c) * np.sum(c))
71 |     kappa = (oa - pe) / (1 - pe)
72 | 
73 |     intersection = np.diag(confusion_matrix)
74 |     union = np.sum(confusion_matrix, axis=1) + np.sum(confusion_matrix, axis=0) - np.diag(confusion_matrix)
75 |     iou = intersection / union
76 |     mean_iou = np.nanmean(iou)
77 | 
78 |     f1 = np.round(f1, 3)
79 |     if mode == 'ts':
80 |         return mean_f1, oa, kappa, mean_iou, f1
81 |     else:
82 |         return mean_f1
83 | 
--------------------------------------------------------------------------------
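
For reference, below is a minimal usage sketch (not part of the repository) showing how the metric helpers in `utils.py` could be wired together during evaluation. The class count, tensor shapes, and variable names are illustrative assumptions rather than values taken from `test.py`.

```python
# Hypothetical sketch: accumulate a per-pixel confusion matrix and report the
# metrics defined in utils.py. The class count (24) and the random stand-in
# prediction/label maps are assumptions for illustration only.
import torch

from utils import eval_metrics, gen_confusion_matrix, set_seed

set_seed(42)
num_class = 24  # assumed number of classes; -1 marks ignored pixels

# Stand-in prediction / ground-truth maps (flattened per-pixel class indices).
pred = torch.randint(0, num_class, (512 * 512,))
label = torch.randint(-1, num_class, (512 * 512,))

# Build the confusion matrix on the ignored-pixel-masked labels, then move it
# to NumPy before computing the summary metrics.
cm = gen_confusion_matrix(num_class, pred, label).cpu().numpy()
mean_f1, oa, kappa, mean_iou, f1 = eval_metrics(cm, mode='ts')
print(f"OA={oa:.4f}  Kappa={kappa:.4f}  mF1={mean_f1:.4f}  mIoU={mean_iou:.4f}")
```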