├── LICENSE
├── README.md
├── datasets
│   ├── README.md
│   └── dataset_synapse.py
├── lists
│   └── lists_Synapse
│       ├── all.lst
│       ├── test_vol.txt
│       └── train.txt
├── networks
│   ├── vit_seg_configs.py
│   ├── vit_seg_modeling.py
│   └── vit_seg_modeling_resnet_skip.py
├── requirements.txt
├── test.py
├── train.py
├── trainer.py
└── utils.py
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner.
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# TransUNet
This repo holds the code for [TransUNet: Transformers Make Strong Encoders for Medical Image Segmentation](https://arxiv.org/pdf/2102.04306.pdf).

## 📰 News
- [7/26/2024] TransUNet, which supports both 2D and 3D data and incorporates a Transformer encoder and decoder, has been featured in the journal Medical Image Analysis ([link](https://www.sciencedirect.com/science/article/pii/S1361841524002056)).
```bibtex
@article{chen2024transunet,
  title={TransUNet: Rethinking the U-Net architecture design for medical image segmentation through the lens of transformers},
  author={Chen, Jieneng and Mei, Jieru and Li, Xianhang and Lu, Yongyi and Yu, Qihang and Wei, Qingyue and Luo, Xiangde and Xie, Yutong and Adeli, Ehsan and Wang, Yan and others},
  journal={Medical Image Analysis},
  pages={103280},
  year={2024},
  publisher={Elsevier}
}
```

- [10/15/2023] 🔥 The 3D version of TransUNet is out! Our 3D TransUNet surpasses nn-UNet with an 88.11% Dice score on the BTCV dataset, outperforms the top-1 solution in the BraTS 2021 challenge, and secured second place in the BraTS 2023 challenge. Please take a look at the [code](https://github.com/Beckschen/3D-TransUNet/tree/main) and [paper](https://arxiv.org/abs/2310.07781).

## Usage

### 1. Download Google pre-trained ViT models
* [Get models in this link](https://console.cloud.google.com/storage/vit_models/): R50-ViT-B_16, ViT-B_16, ViT-L_16...
```bash
wget https://storage.googleapis.com/vit_models/imagenet21k/{MODEL_NAME}.npz &&
mkdir -p ../model/vit_checkpoint/imagenet21k &&
mv {MODEL_NAME}.npz ../model/vit_checkpoint/imagenet21k/{MODEL_NAME}.npz
```

### 2. Prepare data (All data are available!)

All data are available, so there is no need to send emails requesting them. Please use the [BTCV preprocessed data](https://drive.google.com/drive/folders/1ACJEoTp-uqfFJ73qS3eUObQh52nGuzCd?usp=sharing) and [ACDC data](https://drive.google.com/drive/folders/1KQcrci7aKsYZi1hQoZ3T3QUtcy7b--n4?usp=drive_link).

### 3. Environment

Please prepare an environment with python=3.7, then run `pip install -r requirements.txt` to install the dependencies.

### 4. Train/Test

- Run the train script on the Synapse dataset. The batch size can be reduced to 12 or 6 to save memory (please also decrease base_lr linearly; see the example below the test command), and both settings reach similar performance.

```bash
CUDA_VISIBLE_DEVICES=0 python train.py --dataset Synapse --vit_name R50-ViT-B_16
```

- Run the test script on the Synapse dataset. It supports testing for both 2D images and 3D volumes.

```bash
python test.py --dataset Synapse --vit_name R50-ViT-B_16
```
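When lowering the batch size, scale base_lr down by the same factor. A minimal sketch, assuming `train.py` keeps argparse defaults of `--batch_size 24` and `--base_lr 0.01` (please verify the defaults in your checkout before copying this):

```bash
# Batch size halved relative to the assumed default of 24,
# so base_lr is halved as well (linear scaling).
CUDA_VISIBLE_DEVICES=0 python train.py --dataset Synapse --vit_name R50-ViT-B_16 \
    --batch_size 12 --base_lr 0.005
```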

## Reference
* [Google ViT](https://github.com/google-research/vision_transformer)
* [ViT-pytorch](https://github.com/jeonsworld/ViT-pytorch)
* [segmentation_models.pytorch](https://github.com/qubvel/segmentation_models.pytorch)

## Citations

```bibtex
@article{chen2021transunet,
  title={TransUNet: Transformers Make Strong Encoders for Medical Image Segmentation},
  author={Chen, Jieneng and Lu, Yongyi and Yu, Qihang and Luo, Xiangde and Adeli, Ehsan and Wang, Yan and Lu, Le and Yuille, Alan L. and Zhou, Yuyin},
  journal={arXiv preprint arXiv:2102.04306},
  year={2021}
}
```
--------------------------------------------------------------------------------
/datasets/README.md:
--------------------------------------------------------------------------------
# Data Preparation

1. Access the Synapse multi-organ dataset:
   1. Sign up on the [official Synapse website](https://www.synapse.org/#!Synapse:syn3193805/wiki/) and download the dataset. Convert the scans to numpy format, clip the intensities to [-125, 275], normalize each 3D image to [0, 1], and extract 2D slices from the 3D volumes for the training cases, while keeping each 3D test volume in h5 format (a preprocessing sketch follows the directory structure below).
   2. You can also send an email directly to jienengchen01 AT gmail.com to request the preprocessed data for reproduction.
2. The directory structure of the whole project is as follows:

```bash
.
├── TransUNet
│   ├──datasets
│   │    └── dataset_*.py
│   ├──train.py
│   ├──test.py
│   └──...
├── model
│   └── vit_checkpoint
│       └── imagenet21k
│           ├── R50+ViT-B_16.npz
│           └── *.npz
└── data
    └── Synapse
        ├── test_vol_h5
        │   ├── case0001.npy.h5
        │   └── *.npy.h5
        └── train_npz
            ├── case0005_slice000.npz
            └── *.npz
```
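The conversion described in step 1 can be scripted roughly as follows. This is a minimal sketch rather than the authors' preprocessing code: the nibabel-based loading, the `preprocess_case` helper and its arguments, and the (D, H, W) axis order for test volumes are assumptions; the [-125, 275] clip window, [0, 1] normalization, and the npz/h5 layout follow the description above.

```python
import h5py
import nibabel as nib
import numpy as np


def preprocess_case(img_path, lab_path, case_id, is_train, out_dir):
    """Convert one raw Synapse case into the layout this repo expects."""
    image = nib.load(img_path).get_fdata().astype(np.float32)
    label = nib.load(lab_path).get_fdata().astype(np.float32)
    # Clip CT intensities to the stated window, then map linearly to [0, 1].
    image = np.clip(image, -125, 275)
    image = (image + 125) / 400.0
    if is_train:
        # Training cases: one npz file per axial slice (train_npz/).
        for i in range(image.shape[2]):
            np.savez(f"{out_dir}/train_npz/{case_id}_slice{i:03d}.npz",
                     image=image[:, :, i], label=label[:, :, i])
    else:
        # Test cases: the whole volume in a single h5 file (test_vol_h5/).
        # The (D, H, W) axis order is an assumption based on how the
        # volumes are sliced at test time.
        with h5py.File(f"{out_dir}/test_vol_h5/{case_id}.npy.h5", "w") as f:
            f.create_dataset("image", data=image.transpose(2, 0, 1))
            f.create_dataset("label", data=label.transpose(2, 0, 1))
```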
--------------------------------------------------------------------------------
/datasets/dataset_synapse.py:
--------------------------------------------------------------------------------
import os
import random

import h5py
import numpy as np
import torch
from scipy import ndimage
from scipy.ndimage import zoom
from torch.utils.data import Dataset


def random_rot_flip(image, label):
    # Rotate by a random multiple of 90 degrees, then flip along a random axis.
    k = np.random.randint(0, 4)
    image = np.rot90(image, k)
    label = np.rot90(label, k)
    axis = np.random.randint(0, 2)
    image = np.flip(image, axis=axis).copy()
    label = np.flip(label, axis=axis).copy()
    return image, label


def random_rotate(image, label):
    # Rotate by a random integer angle in [-20, 19] degrees; order=0
    # (nearest-neighbour) keeps the label classes from being interpolated.
    angle = np.random.randint(-20, 20)
    image = ndimage.rotate(image, angle, order=0, reshape=False)
    label = ndimage.rotate(label, angle, order=0, reshape=False)
    return image, label


class RandomGenerator(object):
    def __init__(self, output_size):
        self.output_size = output_size

    def __call__(self, sample):
        image, label = sample['image'], sample['label']

        # With probability 1/2 apply rotation + flip; otherwise, with
        # probability 1/4 overall, apply a small random rotation.
        if random.random() > 0.5:
            image, label = random_rot_flip(image, label)
        elif random.random() > 0.5:
            image, label = random_rotate(image, label)
        x, y = image.shape
        if x != self.output_size[0] or y != self.output_size[1]:
            image = zoom(image, (self.output_size[0] / x, self.output_size[1] / y), order=3)  # bicubic for the image
            label = zoom(label, (self.output_size[0] / x, self.output_size[1] / y), order=0)  # nearest for the label
        image = torch.from_numpy(image.astype(np.float32)).unsqueeze(0)
        label = torch.from_numpy(label.astype(np.float32))
        sample = {'image': image, 'label': label.long()}
        return sample


class Synapse_dataset(Dataset):
    def __init__(self, base_dir, list_dir, split, transform=None):
        self.transform = transform  # torchvision-style transform applied to the sample dict
        self.split = split
        with open(os.path.join(list_dir, self.split + '.txt')) as f:
            self.sample_list = f.readlines()
        self.data_dir = base_dir

    def __len__(self):
        return len(self.sample_list)

    def __getitem__(self, idx):
        if self.split == "train":
            # Training samples are single 2D slices stored as npz.
            slice_name = self.sample_list[idx].strip('\n')
            data_path = os.path.join(self.data_dir, slice_name + '.npz')
            data = np.load(data_path)
            image, label = data['image'], data['label']
        else:
            # Test samples are whole 3D volumes stored as h5.
            vol_name = self.sample_list[idx].strip('\n')
            filepath = os.path.join(self.data_dir, "{}.npy.h5".format(vol_name))
            data = h5py.File(filepath, 'r')
            image, label = data['image'][:], data['label'][:]

        sample = {'image': image, 'label': label}
        if self.transform:
            sample = self.transform(sample)
        sample['case_name'] = self.sample_list[idx].strip('\n')
        return sample
--------------------------------------------------------------------------------
/lists/lists_Synapse/all.lst:
--------------------------------------------------------------------------------
1 | case0031.npy.h5 2 | case0007.npy.h5 3 | case0009.npy.h5 4 | case0005.npy.h5 5 | case0026.npy.h5 6 | case0039.npy.h5 7 | case0024.npy.h5 8 | case0034.npy.h5 9 | case0033.npy.h5 10 | case0030.npy.h5 11 | case0023.npy.h5 12 | case0040.npy.h5 13 | case0010.npy.h5 14 | case0021.npy.h5 15 | case0006.npy.h5 16 | case0027.npy.h5 17 | case0028.npy.h5 18 | case0037.npy.h5 19 | case0008.npy.h5 20 | case0022.npy.h5 21 | case0038.npy.h5 22 | case0036.npy.h5 23 | case0032.npy.h5 24 | case0002.npy.h5 25 | case0029.npy.h5 26 | case0003.npy.h5 27 | case0001.npy.h5 28 | case0004.npy.h5 29 | case0025.npy.h5 30 | case0035.npy.h5 31 | 
--------------------------------------------------------------------------------
/lists/lists_Synapse/test_vol.txt:
--------------------------------------------------------------------------------
1 | case0008 2 | case0022 3 | case0038 4 | case0036 5 | case0032 6 | case0002 7 | case0029 8 | case0003 9 | case0001 10 | case0004 11 | case0025 12 | case0035 13 | 
--------------------------------------------------------------------------------
/lists/lists_Synapse/train.txt:
--------------------------------------------------------------------------------
1 | case0031_slice000 2 | case0031_slice001 3 | case0031_slice002 4 | case0031_slice003 5 | case0031_slice004 6 | case0031_slice005 7 | case0031_slice006 8 | case0031_slice007 9 | case0031_slice008 10 | case0031_slice009 11 | case0031_slice010 12 | case0031_slice011 13 | case0031_slice012 14 | case0031_slice013 15 | case0031_slice014 16 | case0031_slice015 17 | case0031_slice016 18 | case0031_slice017 19 | case0031_slice018 20 | case0031_slice019 21 | case0031_slice020 22 | case0031_slice021 23 | case0031_slice022 24 | case0031_slice023 25 | case0031_slice024 26 | case0031_slice025 27 | case0031_slice026 28 | case0031_slice027 29 | case0031_slice028 30 | case0031_slice029 31 | case0031_slice030 32 | case0031_slice031 33 | case0031_slice032 34 | case0031_slice033 35 | case0031_slice034 36 | case0031_slice035 37 | case0031_slice036 38 | 
case0031_slice037 39 | case0031_slice038 40 | case0031_slice039 41 | case0031_slice040 42 | case0031_slice041 43 | case0031_slice042 44 | case0031_slice043 45 | case0031_slice044 46 | case0031_slice045 47 | case0031_slice046 48 | case0031_slice047 49 | case0031_slice048 50 | case0031_slice049 51 | case0031_slice050 52 | case0031_slice051 53 | case0031_slice052 54 | case0031_slice053 55 | case0031_slice054 56 | case0031_slice055 57 | case0031_slice056 58 | case0031_slice057 59 | case0031_slice058 60 | case0031_slice059 61 | case0031_slice060 62 | case0031_slice061 63 | case0031_slice062 64 | case0031_slice063 65 | case0031_slice064 66 | case0031_slice065 67 | case0031_slice066 68 | case0031_slice067 69 | case0031_slice068 70 | case0031_slice069 71 | case0031_slice070 72 | case0031_slice071 73 | case0031_slice072 74 | case0031_slice073 75 | case0031_slice074 76 | case0031_slice075 77 | case0031_slice076 78 | case0031_slice077 79 | case0031_slice078 80 | case0031_slice079 81 | case0031_slice080 82 | case0031_slice081 83 | case0031_slice082 84 | case0031_slice083 85 | case0031_slice084 86 | case0031_slice085 87 | case0031_slice086 88 | case0031_slice087 89 | case0031_slice088 90 | case0031_slice089 91 | case0031_slice090 92 | case0031_slice091 93 | case0031_slice092 94 | case0007_slice000 95 | case0007_slice001 96 | case0007_slice002 97 | case0007_slice003 98 | case0007_slice004 99 | case0007_slice005 100 | case0007_slice006 101 | case0007_slice007 102 | case0007_slice008 103 | case0007_slice009 104 | case0007_slice010 105 | case0007_slice011 106 | case0007_slice012 107 | case0007_slice013 108 | case0007_slice014 109 | case0007_slice015 110 | case0007_slice016 111 | case0007_slice017 112 | case0007_slice018 113 | case0007_slice019 114 | case0007_slice020 115 | case0007_slice021 116 | case0007_slice022 117 | case0007_slice023 118 | case0007_slice024 119 | case0007_slice025 120 | case0007_slice026 121 | case0007_slice027 122 | case0007_slice028 123 | case0007_slice029 124 | case0007_slice030 125 | case0007_slice031 126 | case0007_slice032 127 | case0007_slice033 128 | case0007_slice034 129 | case0007_slice035 130 | case0007_slice036 131 | case0007_slice037 132 | case0007_slice038 133 | case0007_slice039 134 | case0007_slice040 135 | case0007_slice041 136 | case0007_slice042 137 | case0007_slice043 138 | case0007_slice044 139 | case0007_slice045 140 | case0007_slice046 141 | case0007_slice047 142 | case0007_slice048 143 | case0007_slice049 144 | case0007_slice050 145 | case0007_slice051 146 | case0007_slice052 147 | case0007_slice053 148 | case0007_slice054 149 | case0007_slice055 150 | case0007_slice056 151 | case0007_slice057 152 | case0007_slice058 153 | case0007_slice059 154 | case0007_slice060 155 | case0007_slice061 156 | case0007_slice062 157 | case0007_slice063 158 | case0007_slice064 159 | case0007_slice065 160 | case0007_slice066 161 | case0007_slice067 162 | case0007_slice068 163 | case0007_slice069 164 | case0007_slice070 165 | case0007_slice071 166 | case0007_slice072 167 | case0007_slice073 168 | case0007_slice074 169 | case0007_slice075 170 | case0007_slice076 171 | case0007_slice077 172 | case0007_slice078 173 | case0007_slice079 174 | case0007_slice080 175 | case0007_slice081 176 | case0007_slice082 177 | case0007_slice083 178 | case0007_slice084 179 | case0007_slice085 180 | case0007_slice086 181 | case0007_slice087 182 | case0007_slice088 183 | case0007_slice089 184 | case0007_slice090 185 | case0007_slice091 186 | case0007_slice092 187 | case0007_slice093 188 | 
case0007_slice094 189 | case0007_slice095 190 | case0007_slice096 191 | case0007_slice097 192 | case0007_slice098 193 | case0007_slice099 194 | case0007_slice100 195 | case0007_slice101 196 | case0007_slice102 197 | case0007_slice103 198 | case0007_slice104 199 | case0007_slice105 200 | case0007_slice106 201 | case0007_slice107 202 | case0007_slice108 203 | case0007_slice109 204 | case0007_slice110 205 | case0007_slice111 206 | case0007_slice112 207 | case0007_slice113 208 | case0007_slice114 209 | case0007_slice115 210 | case0007_slice116 211 | case0007_slice117 212 | case0007_slice118 213 | case0007_slice119 214 | case0007_slice120 215 | case0007_slice121 216 | case0007_slice122 217 | case0007_slice123 218 | case0007_slice124 219 | case0007_slice125 220 | case0007_slice126 221 | case0007_slice127 222 | case0007_slice128 223 | case0007_slice129 224 | case0007_slice130 225 | case0007_slice131 226 | case0007_slice132 227 | case0007_slice133 228 | case0007_slice134 229 | case0007_slice135 230 | case0007_slice136 231 | case0007_slice137 232 | case0007_slice138 233 | case0007_slice139 234 | case0007_slice140 235 | case0007_slice141 236 | case0007_slice142 237 | case0007_slice143 238 | case0007_slice144 239 | case0007_slice145 240 | case0007_slice146 241 | case0007_slice147 242 | case0007_slice148 243 | case0007_slice149 244 | case0007_slice150 245 | case0007_slice151 246 | case0007_slice152 247 | case0007_slice153 248 | case0007_slice154 249 | case0007_slice155 250 | case0007_slice156 251 | case0007_slice157 252 | case0007_slice158 253 | case0007_slice159 254 | case0007_slice160 255 | case0007_slice161 256 | case0007_slice162 257 | case0009_slice000 258 | case0009_slice001 259 | case0009_slice002 260 | case0009_slice003 261 | case0009_slice004 262 | case0009_slice005 263 | case0009_slice006 264 | case0009_slice007 265 | case0009_slice008 266 | case0009_slice009 267 | case0009_slice010 268 | case0009_slice011 269 | case0009_slice012 270 | case0009_slice013 271 | case0009_slice014 272 | case0009_slice015 273 | case0009_slice016 274 | case0009_slice017 275 | case0009_slice018 276 | case0009_slice019 277 | case0009_slice020 278 | case0009_slice021 279 | case0009_slice022 280 | case0009_slice023 281 | case0009_slice024 282 | case0009_slice025 283 | case0009_slice026 284 | case0009_slice027 285 | case0009_slice028 286 | case0009_slice029 287 | case0009_slice030 288 | case0009_slice031 289 | case0009_slice032 290 | case0009_slice033 291 | case0009_slice034 292 | case0009_slice035 293 | case0009_slice036 294 | case0009_slice037 295 | case0009_slice038 296 | case0009_slice039 297 | case0009_slice040 298 | case0009_slice041 299 | case0009_slice042 300 | case0009_slice043 301 | case0009_slice044 302 | case0009_slice045 303 | case0009_slice046 304 | case0009_slice047 305 | case0009_slice048 306 | case0009_slice049 307 | case0009_slice050 308 | case0009_slice051 309 | case0009_slice052 310 | case0009_slice053 311 | case0009_slice054 312 | case0009_slice055 313 | case0009_slice056 314 | case0009_slice057 315 | case0009_slice058 316 | case0009_slice059 317 | case0009_slice060 318 | case0009_slice061 319 | case0009_slice062 320 | case0009_slice063 321 | case0009_slice064 322 | case0009_slice065 323 | case0009_slice066 324 | case0009_slice067 325 | case0009_slice068 326 | case0009_slice069 327 | case0009_slice070 328 | case0009_slice071 329 | case0009_slice072 330 | case0009_slice073 331 | case0009_slice074 332 | case0009_slice075 333 | case0009_slice076 334 | case0009_slice077 335 | case0009_slice078 336 | 
case0009_slice079 337 | case0009_slice080 338 | case0009_slice081 339 | case0009_slice082 340 | case0009_slice083 341 | case0009_slice084 342 | case0009_slice085 343 | case0009_slice086 344 | case0009_slice087 345 | case0009_slice088 346 | case0009_slice089 347 | case0009_slice090 348 | case0009_slice091 349 | case0009_slice092 350 | case0009_slice093 351 | case0009_slice094 352 | case0009_slice095 353 | case0009_slice096 354 | case0009_slice097 355 | case0009_slice098 356 | case0009_slice099 357 | case0009_slice100 358 | case0009_slice101 359 | case0009_slice102 360 | case0009_slice103 361 | case0009_slice104 362 | case0009_slice105 363 | case0009_slice106 364 | case0009_slice107 365 | case0009_slice108 366 | case0009_slice109 367 | case0009_slice110 368 | case0009_slice111 369 | case0009_slice112 370 | case0009_slice113 371 | case0009_slice114 372 | case0009_slice115 373 | case0009_slice116 374 | case0009_slice117 375 | case0009_slice118 376 | case0009_slice119 377 | case0009_slice120 378 | case0009_slice121 379 | case0009_slice122 380 | case0009_slice123 381 | case0009_slice124 382 | case0009_slice125 383 | case0009_slice126 384 | case0009_slice127 385 | case0009_slice128 386 | case0009_slice129 387 | case0009_slice130 388 | case0009_slice131 389 | case0009_slice132 390 | case0009_slice133 391 | case0009_slice134 392 | case0009_slice135 393 | case0009_slice136 394 | case0009_slice137 395 | case0009_slice138 396 | case0009_slice139 397 | case0009_slice140 398 | case0009_slice141 399 | case0009_slice142 400 | case0009_slice143 401 | case0009_slice144 402 | case0009_slice145 403 | case0009_slice146 404 | case0009_slice147 405 | case0009_slice148 406 | case0005_slice000 407 | case0005_slice001 408 | case0005_slice002 409 | case0005_slice003 410 | case0005_slice004 411 | case0005_slice005 412 | case0005_slice006 413 | case0005_slice007 414 | case0005_slice008 415 | case0005_slice009 416 | case0005_slice010 417 | case0005_slice011 418 | case0005_slice012 419 | case0005_slice013 420 | case0005_slice014 421 | case0005_slice015 422 | case0005_slice016 423 | case0005_slice017 424 | case0005_slice018 425 | case0005_slice019 426 | case0005_slice020 427 | case0005_slice021 428 | case0005_slice022 429 | case0005_slice023 430 | case0005_slice024 431 | case0005_slice025 432 | case0005_slice026 433 | case0005_slice027 434 | case0005_slice028 435 | case0005_slice029 436 | case0005_slice030 437 | case0005_slice031 438 | case0005_slice032 439 | case0005_slice033 440 | case0005_slice034 441 | case0005_slice035 442 | case0005_slice036 443 | case0005_slice037 444 | case0005_slice038 445 | case0005_slice039 446 | case0005_slice040 447 | case0005_slice041 448 | case0005_slice042 449 | case0005_slice043 450 | case0005_slice044 451 | case0005_slice045 452 | case0005_slice046 453 | case0005_slice047 454 | case0005_slice048 455 | case0005_slice049 456 | case0005_slice050 457 | case0005_slice051 458 | case0005_slice052 459 | case0005_slice053 460 | case0005_slice054 461 | case0005_slice055 462 | case0005_slice056 463 | case0005_slice057 464 | case0005_slice058 465 | case0005_slice059 466 | case0005_slice060 467 | case0005_slice061 468 | case0005_slice062 469 | case0005_slice063 470 | case0005_slice064 471 | case0005_slice065 472 | case0005_slice066 473 | case0005_slice067 474 | case0005_slice068 475 | case0005_slice069 476 | case0005_slice070 477 | case0005_slice071 478 | case0005_slice072 479 | case0005_slice073 480 | case0005_slice074 481 | case0005_slice075 482 | case0005_slice076 483 | case0005_slice077 484 | 
case0005_slice078 485 | case0005_slice079 486 | case0005_slice080 487 | case0005_slice081 488 | case0005_slice082 489 | case0005_slice083 490 | case0005_slice084 491 | case0005_slice085 492 | case0005_slice086 493 | case0005_slice087 494 | case0005_slice088 495 | case0005_slice089 496 | case0005_slice090 497 | case0005_slice091 498 | case0005_slice092 499 | case0005_slice093 500 | case0005_slice094 501 | case0005_slice095 502 | case0005_slice096 503 | case0005_slice097 504 | case0005_slice098 505 | case0005_slice099 506 | case0005_slice100 507 | case0005_slice101 508 | case0005_slice102 509 | case0005_slice103 510 | case0005_slice104 511 | case0005_slice105 512 | case0005_slice106 513 | case0005_slice107 514 | case0005_slice108 515 | case0005_slice109 516 | case0005_slice110 517 | case0005_slice111 518 | case0005_slice112 519 | case0005_slice113 520 | case0005_slice114 521 | case0005_slice115 522 | case0005_slice116 523 | case0026_slice000 524 | case0026_slice001 525 | case0026_slice002 526 | case0026_slice003 527 | case0026_slice004 528 | case0026_slice005 529 | case0026_slice006 530 | case0026_slice007 531 | case0026_slice008 532 | case0026_slice009 533 | case0026_slice010 534 | case0026_slice011 535 | case0026_slice012 536 | case0026_slice013 537 | case0026_slice014 538 | case0026_slice015 539 | case0026_slice016 540 | case0026_slice017 541 | case0026_slice018 542 | case0026_slice019 543 | case0026_slice020 544 | case0026_slice021 545 | case0026_slice022 546 | case0026_slice023 547 | case0026_slice024 548 | case0026_slice025 549 | case0026_slice026 550 | case0026_slice027 551 | case0026_slice028 552 | case0026_slice029 553 | case0026_slice030 554 | case0026_slice031 555 | case0026_slice032 556 | case0026_slice033 557 | case0026_slice034 558 | case0026_slice035 559 | case0026_slice036 560 | case0026_slice037 561 | case0026_slice038 562 | case0026_slice039 563 | case0026_slice040 564 | case0026_slice041 565 | case0026_slice042 566 | case0026_slice043 567 | case0026_slice044 568 | case0026_slice045 569 | case0026_slice046 570 | case0026_slice047 571 | case0026_slice048 572 | case0026_slice049 573 | case0026_slice050 574 | case0026_slice051 575 | case0026_slice052 576 | case0026_slice053 577 | case0026_slice054 578 | case0026_slice055 579 | case0026_slice056 580 | case0026_slice057 581 | case0026_slice058 582 | case0026_slice059 583 | case0026_slice060 584 | case0026_slice061 585 | case0026_slice062 586 | case0026_slice063 587 | case0026_slice064 588 | case0026_slice065 589 | case0026_slice066 590 | case0026_slice067 591 | case0026_slice068 592 | case0026_slice069 593 | case0026_slice070 594 | case0026_slice071 595 | case0026_slice072 596 | case0026_slice073 597 | case0026_slice074 598 | case0026_slice075 599 | case0026_slice076 600 | case0026_slice077 601 | case0026_slice078 602 | case0026_slice079 603 | case0026_slice080 604 | case0026_slice081 605 | case0026_slice082 606 | case0026_slice083 607 | case0026_slice084 608 | case0026_slice085 609 | case0026_slice086 610 | case0026_slice087 611 | case0026_slice088 612 | case0026_slice089 613 | case0026_slice090 614 | case0026_slice091 615 | case0026_slice092 616 | case0026_slice093 617 | case0026_slice094 618 | case0026_slice095 619 | case0026_slice096 620 | case0026_slice097 621 | case0026_slice098 622 | case0026_slice099 623 | case0026_slice100 624 | case0026_slice101 625 | case0026_slice102 626 | case0026_slice103 627 | case0026_slice104 628 | case0026_slice105 629 | case0026_slice106 630 | case0026_slice107 631 | case0026_slice108 632 | 
case0026_slice109 633 | case0026_slice110 634 | case0026_slice111 635 | case0026_slice112 636 | case0026_slice113 637 | case0026_slice114 638 | case0026_slice115 639 | case0026_slice116 640 | case0026_slice117 641 | case0026_slice118 642 | case0026_slice119 643 | case0026_slice120 644 | case0026_slice121 645 | case0026_slice122 646 | case0026_slice123 647 | case0026_slice124 648 | case0026_slice125 649 | case0026_slice126 650 | case0026_slice127 651 | case0026_slice128 652 | case0026_slice129 653 | case0026_slice130 654 | case0039_slice000 655 | case0039_slice001 656 | case0039_slice002 657 | case0039_slice003 658 | case0039_slice004 659 | case0039_slice005 660 | case0039_slice006 661 | case0039_slice007 662 | case0039_slice008 663 | case0039_slice009 664 | case0039_slice010 665 | case0039_slice011 666 | case0039_slice012 667 | case0039_slice013 668 | case0039_slice014 669 | case0039_slice015 670 | case0039_slice016 671 | case0039_slice017 672 | case0039_slice018 673 | case0039_slice019 674 | case0039_slice020 675 | case0039_slice021 676 | case0039_slice022 677 | case0039_slice023 678 | case0039_slice024 679 | case0039_slice025 680 | case0039_slice026 681 | case0039_slice027 682 | case0039_slice028 683 | case0039_slice029 684 | case0039_slice030 685 | case0039_slice031 686 | case0039_slice032 687 | case0039_slice033 688 | case0039_slice034 689 | case0039_slice035 690 | case0039_slice036 691 | case0039_slice037 692 | case0039_slice038 693 | case0039_slice039 694 | case0039_slice040 695 | case0039_slice041 696 | case0039_slice042 697 | case0039_slice043 698 | case0039_slice044 699 | case0039_slice045 700 | case0039_slice046 701 | case0039_slice047 702 | case0039_slice048 703 | case0039_slice049 704 | case0039_slice050 705 | case0039_slice051 706 | case0039_slice052 707 | case0039_slice053 708 | case0039_slice054 709 | case0039_slice055 710 | case0039_slice056 711 | case0039_slice057 712 | case0039_slice058 713 | case0039_slice059 714 | case0039_slice060 715 | case0039_slice061 716 | case0039_slice062 717 | case0039_slice063 718 | case0039_slice064 719 | case0039_slice065 720 | case0039_slice066 721 | case0039_slice067 722 | case0039_slice068 723 | case0039_slice069 724 | case0039_slice070 725 | case0039_slice071 726 | case0039_slice072 727 | case0039_slice073 728 | case0039_slice074 729 | case0039_slice075 730 | case0039_slice076 731 | case0039_slice077 732 | case0039_slice078 733 | case0039_slice079 734 | case0039_slice080 735 | case0039_slice081 736 | case0039_slice082 737 | case0039_slice083 738 | case0039_slice084 739 | case0039_slice085 740 | case0039_slice086 741 | case0039_slice087 742 | case0039_slice088 743 | case0039_slice089 744 | case0024_slice000 745 | case0024_slice001 746 | case0024_slice002 747 | case0024_slice003 748 | case0024_slice004 749 | case0024_slice005 750 | case0024_slice006 751 | case0024_slice007 752 | case0024_slice008 753 | case0024_slice009 754 | case0024_slice010 755 | case0024_slice011 756 | case0024_slice012 757 | case0024_slice013 758 | case0024_slice014 759 | case0024_slice015 760 | case0024_slice016 761 | case0024_slice017 762 | case0024_slice018 763 | case0024_slice019 764 | case0024_slice020 765 | case0024_slice021 766 | case0024_slice022 767 | case0024_slice023 768 | case0024_slice024 769 | case0024_slice025 770 | case0024_slice026 771 | case0024_slice027 772 | case0024_slice028 773 | case0024_slice029 774 | case0024_slice030 775 | case0024_slice031 776 | case0024_slice032 777 | case0024_slice033 778 | case0024_slice034 779 | case0024_slice035 780 | 
case0024_slice036 781 | case0024_slice037 782 | case0024_slice038 783 | case0024_slice039 784 | case0024_slice040 785 | case0024_slice041 786 | case0024_slice042 787 | case0024_slice043 788 | case0024_slice044 789 | case0024_slice045 790 | case0024_slice046 791 | case0024_slice047 792 | case0024_slice048 793 | case0024_slice049 794 | case0024_slice050 795 | case0024_slice051 796 | case0024_slice052 797 | case0024_slice053 798 | case0024_slice054 799 | case0024_slice055 800 | case0024_slice056 801 | case0024_slice057 802 | case0024_slice058 803 | case0024_slice059 804 | case0024_slice060 805 | case0024_slice061 806 | case0024_slice062 807 | case0024_slice063 808 | case0024_slice064 809 | case0024_slice065 810 | case0024_slice066 811 | case0024_slice067 812 | case0024_slice068 813 | case0024_slice069 814 | case0024_slice070 815 | case0024_slice071 816 | case0024_slice072 817 | case0024_slice073 818 | case0024_slice074 819 | case0024_slice075 820 | case0024_slice076 821 | case0024_slice077 822 | case0024_slice078 823 | case0024_slice079 824 | case0024_slice080 825 | case0024_slice081 826 | case0024_slice082 827 | case0024_slice083 828 | case0024_slice084 829 | case0024_slice085 830 | case0024_slice086 831 | case0024_slice087 832 | case0024_slice088 833 | case0024_slice089 834 | case0024_slice090 835 | case0024_slice091 836 | case0024_slice092 837 | case0024_slice093 838 | case0024_slice094 839 | case0024_slice095 840 | case0024_slice096 841 | case0024_slice097 842 | case0024_slice098 843 | case0024_slice099 844 | case0024_slice100 845 | case0024_slice101 846 | case0024_slice102 847 | case0024_slice103 848 | case0024_slice104 849 | case0024_slice105 850 | case0024_slice106 851 | case0024_slice107 852 | case0024_slice108 853 | case0024_slice109 854 | case0024_slice110 855 | case0024_slice111 856 | case0024_slice112 857 | case0024_slice113 858 | case0024_slice114 859 | case0024_slice115 860 | case0024_slice116 861 | case0024_slice117 862 | case0024_slice118 863 | case0024_slice119 864 | case0024_slice120 865 | case0024_slice121 866 | case0024_slice122 867 | case0024_slice123 868 | case0034_slice000 869 | case0034_slice001 870 | case0034_slice002 871 | case0034_slice003 872 | case0034_slice004 873 | case0034_slice005 874 | case0034_slice006 875 | case0034_slice007 876 | case0034_slice008 877 | case0034_slice009 878 | case0034_slice010 879 | case0034_slice011 880 | case0034_slice012 881 | case0034_slice013 882 | case0034_slice014 883 | case0034_slice015 884 | case0034_slice016 885 | case0034_slice017 886 | case0034_slice018 887 | case0034_slice019 888 | case0034_slice020 889 | case0034_slice021 890 | case0034_slice022 891 | case0034_slice023 892 | case0034_slice024 893 | case0034_slice025 894 | case0034_slice026 895 | case0034_slice027 896 | case0034_slice028 897 | case0034_slice029 898 | case0034_slice030 899 | case0034_slice031 900 | case0034_slice032 901 | case0034_slice033 902 | case0034_slice034 903 | case0034_slice035 904 | case0034_slice036 905 | case0034_slice037 906 | case0034_slice038 907 | case0034_slice039 908 | case0034_slice040 909 | case0034_slice041 910 | case0034_slice042 911 | case0034_slice043 912 | case0034_slice044 913 | case0034_slice045 914 | case0034_slice046 915 | case0034_slice047 916 | case0034_slice048 917 | case0034_slice049 918 | case0034_slice050 919 | case0034_slice051 920 | case0034_slice052 921 | case0034_slice053 922 | case0034_slice054 923 | case0034_slice055 924 | case0034_slice056 925 | case0034_slice057 926 | case0034_slice058 927 | case0034_slice059 928 | 
case0034_slice060 929 | case0034_slice061 930 | case0034_slice062 931 | case0034_slice063 932 | case0034_slice064 933 | case0034_slice065 934 | case0034_slice066 935 | case0034_slice067 936 | case0034_slice068 937 | case0034_slice069 938 | case0034_slice070 939 | case0034_slice071 940 | case0034_slice072 941 | case0034_slice073 942 | case0034_slice074 943 | case0034_slice075 944 | case0034_slice076 945 | case0034_slice077 946 | case0034_slice078 947 | case0034_slice079 948 | case0034_slice080 949 | case0034_slice081 950 | case0034_slice082 951 | case0034_slice083 952 | case0034_slice084 953 | case0034_slice085 954 | case0034_slice086 955 | case0034_slice087 956 | case0034_slice088 957 | case0034_slice089 958 | case0034_slice090 959 | case0034_slice091 960 | case0034_slice092 961 | case0034_slice093 962 | case0034_slice094 963 | case0034_slice095 964 | case0034_slice096 965 | case0034_slice097 966 | case0033_slice000 967 | case0033_slice001 968 | case0033_slice002 969 | case0033_slice003 970 | case0033_slice004 971 | case0033_slice005 972 | case0033_slice006 973 | case0033_slice007 974 | case0033_slice008 975 | case0033_slice009 976 | case0033_slice010 977 | case0033_slice011 978 | case0033_slice012 979 | case0033_slice013 980 | case0033_slice014 981 | case0033_slice015 982 | case0033_slice016 983 | case0033_slice017 984 | case0033_slice018 985 | case0033_slice019 986 | case0033_slice020 987 | case0033_slice021 988 | case0033_slice022 989 | case0033_slice023 990 | case0033_slice024 991 | case0033_slice025 992 | case0033_slice026 993 | case0033_slice027 994 | case0033_slice028 995 | case0033_slice029 996 | case0033_slice030 997 | case0033_slice031 998 | case0033_slice032 999 | case0033_slice033 1000 | case0033_slice034 1001 | case0033_slice035 1002 | case0033_slice036 1003 | case0033_slice037 1004 | case0033_slice038 1005 | case0033_slice039 1006 | case0033_slice040 1007 | case0033_slice041 1008 | case0033_slice042 1009 | case0033_slice043 1010 | case0033_slice044 1011 | case0033_slice045 1012 | case0033_slice046 1013 | case0033_slice047 1014 | case0033_slice048 1015 | case0033_slice049 1016 | case0033_slice050 1017 | case0033_slice051 1018 | case0033_slice052 1019 | case0033_slice053 1020 | case0033_slice054 1021 | case0033_slice055 1022 | case0033_slice056 1023 | case0033_slice057 1024 | case0033_slice058 1025 | case0033_slice059 1026 | case0033_slice060 1027 | case0033_slice061 1028 | case0033_slice062 1029 | case0033_slice063 1030 | case0033_slice064 1031 | case0033_slice065 1032 | case0033_slice066 1033 | case0033_slice067 1034 | case0033_slice068 1035 | case0033_slice069 1036 | case0033_slice070 1037 | case0033_slice071 1038 | case0033_slice072 1039 | case0033_slice073 1040 | case0033_slice074 1041 | case0033_slice075 1042 | case0033_slice076 1043 | case0033_slice077 1044 | case0033_slice078 1045 | case0033_slice079 1046 | case0033_slice080 1047 | case0033_slice081 1048 | case0033_slice082 1049 | case0033_slice083 1050 | case0033_slice084 1051 | case0033_slice085 1052 | case0033_slice086 1053 | case0033_slice087 1054 | case0033_slice088 1055 | case0033_slice089 1056 | case0033_slice090 1057 | case0033_slice091 1058 | case0033_slice092 1059 | case0033_slice093 1060 | case0033_slice094 1061 | case0033_slice095 1062 | case0033_slice096 1063 | case0033_slice097 1064 | case0033_slice098 1065 | case0033_slice099 1066 | case0033_slice100 1067 | case0033_slice101 1068 | case0033_slice102 1069 | case0033_slice103 1070 | case0030_slice000 1071 | case0030_slice001 1072 | case0030_slice002 1073 | 
case0030_slice003 1074 | case0030_slice004 1075 | case0030_slice005 1076 | case0030_slice006 1077 | case0030_slice007 1078 | case0030_slice008 1079 | case0030_slice009 1080 | case0030_slice010 1081 | case0030_slice011 1082 | case0030_slice012 1083 | case0030_slice013 1084 | case0030_slice014 1085 | case0030_slice015 1086 | case0030_slice016 1087 | case0030_slice017 1088 | case0030_slice018 1089 | case0030_slice019 1090 | case0030_slice020 1091 | case0030_slice021 1092 | case0030_slice022 1093 | case0030_slice023 1094 | case0030_slice024 1095 | case0030_slice025 1096 | case0030_slice026 1097 | case0030_slice027 1098 | case0030_slice028 1099 | case0030_slice029 1100 | case0030_slice030 1101 | case0030_slice031 1102 | case0030_slice032 1103 | case0030_slice033 1104 | case0030_slice034 1105 | case0030_slice035 1106 | case0030_slice036 1107 | case0030_slice037 1108 | case0030_slice038 1109 | case0030_slice039 1110 | case0030_slice040 1111 | case0030_slice041 1112 | case0030_slice042 1113 | case0030_slice043 1114 | case0030_slice044 1115 | case0030_slice045 1116 | case0030_slice046 1117 | case0030_slice047 1118 | case0030_slice048 1119 | case0030_slice049 1120 | case0030_slice050 1121 | case0030_slice051 1122 | case0030_slice052 1123 | case0030_slice053 1124 | case0030_slice054 1125 | case0030_slice055 1126 | case0030_slice056 1127 | case0030_slice057 1128 | case0030_slice058 1129 | case0030_slice059 1130 | case0030_slice060 1131 | case0030_slice061 1132 | case0030_slice062 1133 | case0030_slice063 1134 | case0030_slice064 1135 | case0030_slice065 1136 | case0030_slice066 1137 | case0030_slice067 1138 | case0030_slice068 1139 | case0030_slice069 1140 | case0030_slice070 1141 | case0030_slice071 1142 | case0030_slice072 1143 | case0030_slice073 1144 | case0030_slice074 1145 | case0030_slice075 1146 | case0030_slice076 1147 | case0030_slice077 1148 | case0030_slice078 1149 | case0030_slice079 1150 | case0030_slice080 1151 | case0030_slice081 1152 | case0030_slice082 1153 | case0030_slice083 1154 | case0030_slice084 1155 | case0030_slice085 1156 | case0030_slice086 1157 | case0030_slice087 1158 | case0030_slice088 1159 | case0030_slice089 1160 | case0030_slice090 1161 | case0030_slice091 1162 | case0030_slice092 1163 | case0030_slice093 1164 | case0030_slice094 1165 | case0030_slice095 1166 | case0030_slice096 1167 | case0030_slice097 1168 | case0030_slice098 1169 | case0030_slice099 1170 | case0030_slice100 1171 | case0030_slice101 1172 | case0030_slice102 1173 | case0030_slice103 1174 | case0030_slice104 1175 | case0030_slice105 1176 | case0030_slice106 1177 | case0030_slice107 1178 | case0030_slice108 1179 | case0030_slice109 1180 | case0030_slice110 1181 | case0030_slice111 1182 | case0030_slice112 1183 | case0030_slice113 1184 | case0030_slice114 1185 | case0030_slice115 1186 | case0030_slice116 1187 | case0030_slice117 1188 | case0030_slice118 1189 | case0030_slice119 1190 | case0030_slice120 1191 | case0030_slice121 1192 | case0030_slice122 1193 | case0030_slice123 1194 | case0030_slice124 1195 | case0030_slice125 1196 | case0030_slice126 1197 | case0030_slice127 1198 | case0030_slice128 1199 | case0030_slice129 1200 | case0030_slice130 1201 | case0030_slice131 1202 | case0030_slice132 1203 | case0030_slice133 1204 | case0030_slice134 1205 | case0030_slice135 1206 | case0030_slice136 1207 | case0030_slice137 1208 | case0030_slice138 1209 | case0030_slice139 1210 | case0030_slice140 1211 | case0030_slice141 1212 | case0030_slice142 1213 | case0030_slice143 1214 | case0030_slice144 1215 | 
case0030_slice145 1216 | case0030_slice146 1217 | case0030_slice147 1218 | case0030_slice148 1219 | case0030_slice149 1220 | case0030_slice150 1221 | case0030_slice151 1222 | case0030_slice152 1223 | case0023_slice000 1224 | case0023_slice001 1225 | case0023_slice002 1226 | case0023_slice003 1227 | case0023_slice004 1228 | case0023_slice005 1229 | case0023_slice006 1230 | case0023_slice007 1231 | case0023_slice008 1232 | case0023_slice009 1233 | case0023_slice010 1234 | case0023_slice011 1235 | case0023_slice012 1236 | case0023_slice013 1237 | case0023_slice014 1238 | case0023_slice015 1239 | case0023_slice016 1240 | case0023_slice017 1241 | case0023_slice018 1242 | case0023_slice019 1243 | case0023_slice020 1244 | case0023_slice021 1245 | case0023_slice022 1246 | case0023_slice023 1247 | case0023_slice024 1248 | case0023_slice025 1249 | case0023_slice026 1250 | case0023_slice027 1251 | case0023_slice028 1252 | case0023_slice029 1253 | case0023_slice030 1254 | case0023_slice031 1255 | case0023_slice032 1256 | case0023_slice033 1257 | case0023_slice034 1258 | case0023_slice035 1259 | case0023_slice036 1260 | case0023_slice037 1261 | case0023_slice038 1262 | case0023_slice039 1263 | case0023_slice040 1264 | case0023_slice041 1265 | case0023_slice042 1266 | case0023_slice043 1267 | case0023_slice044 1268 | case0023_slice045 1269 | case0023_slice046 1270 | case0023_slice047 1271 | case0023_slice048 1272 | case0023_slice049 1273 | case0023_slice050 1274 | case0023_slice051 1275 | case0023_slice052 1276 | case0023_slice053 1277 | case0023_slice054 1278 | case0023_slice055 1279 | case0023_slice056 1280 | case0023_slice057 1281 | case0023_slice058 1282 | case0023_slice059 1283 | case0023_slice060 1284 | case0023_slice061 1285 | case0023_slice062 1286 | case0023_slice063 1287 | case0023_slice064 1288 | case0023_slice065 1289 | case0023_slice066 1290 | case0023_slice067 1291 | case0023_slice068 1292 | case0023_slice069 1293 | case0023_slice070 1294 | case0023_slice071 1295 | case0023_slice072 1296 | case0023_slice073 1297 | case0023_slice074 1298 | case0023_slice075 1299 | case0023_slice076 1300 | case0023_slice077 1301 | case0023_slice078 1302 | case0023_slice079 1303 | case0023_slice080 1304 | case0023_slice081 1305 | case0023_slice082 1306 | case0023_slice083 1307 | case0023_slice084 1308 | case0023_slice085 1309 | case0023_slice086 1310 | case0023_slice087 1311 | case0023_slice088 1312 | case0023_slice089 1313 | case0023_slice090 1314 | case0023_slice091 1315 | case0023_slice092 1316 | case0023_slice093 1317 | case0023_slice094 1318 | case0023_slice095 1319 | case0040_slice000 1320 | case0040_slice001 1321 | case0040_slice002 1322 | case0040_slice003 1323 | case0040_slice004 1324 | case0040_slice005 1325 | case0040_slice006 1326 | case0040_slice007 1327 | case0040_slice008 1328 | case0040_slice009 1329 | case0040_slice010 1330 | case0040_slice011 1331 | case0040_slice012 1332 | case0040_slice013 1333 | case0040_slice014 1334 | case0040_slice015 1335 | case0040_slice016 1336 | case0040_slice017 1337 | case0040_slice018 1338 | case0040_slice019 1339 | case0040_slice020 1340 | case0040_slice021 1341 | case0040_slice022 1342 | case0040_slice023 1343 | case0040_slice024 1344 | case0040_slice025 1345 | case0040_slice026 1346 | case0040_slice027 1347 | case0040_slice028 1348 | case0040_slice029 1349 | case0040_slice030 1350 | case0040_slice031 1351 | case0040_slice032 1352 | case0040_slice033 1353 | case0040_slice034 1354 | case0040_slice035 1355 | case0040_slice036 1356 | case0040_slice037 1357 | 
case0040_slice038 1358 | case0040_slice039 1359 | case0040_slice040 1360 | case0040_slice041 1361 | case0040_slice042 1362 | case0040_slice043 1363 | case0040_slice044 1364 | case0040_slice045 1365 | case0040_slice046 1366 | case0040_slice047 1367 | case0040_slice048 1368 | case0040_slice049 1369 | case0040_slice050 1370 | case0040_slice051 1371 | case0040_slice052 1372 | case0040_slice053 1373 | case0040_slice054 1374 | case0040_slice055 1375 | case0040_slice056 1376 | case0040_slice057 1377 | case0040_slice058 1378 | case0040_slice059 1379 | case0040_slice060 1380 | case0040_slice061 1381 | case0040_slice062 1382 | case0040_slice063 1383 | case0040_slice064 1384 | case0040_slice065 1385 | case0040_slice066 1386 | case0040_slice067 1387 | case0040_slice068 1388 | case0040_slice069 1389 | case0040_slice070 1390 | case0040_slice071 1391 | case0040_slice072 1392 | case0040_slice073 1393 | case0040_slice074 1394 | case0040_slice075 1395 | case0040_slice076 1396 | case0040_slice077 1397 | case0040_slice078 1398 | case0040_slice079 1399 | case0040_slice080 1400 | case0040_slice081 1401 | case0040_slice082 1402 | case0040_slice083 1403 | case0040_slice084 1404 | case0040_slice085 1405 | case0040_slice086 1406 | case0040_slice087 1407 | case0040_slice088 1408 | case0040_slice089 1409 | case0040_slice090 1410 | case0040_slice091 1411 | case0040_slice092 1412 | case0040_slice093 1413 | case0040_slice094 1414 | case0040_slice095 1415 | case0040_slice096 1416 | case0040_slice097 1417 | case0040_slice098 1418 | case0040_slice099 1419 | case0040_slice100 1420 | case0040_slice101 1421 | case0040_slice102 1422 | case0040_slice103 1423 | case0040_slice104 1424 | case0040_slice105 1425 | case0040_slice106 1426 | case0040_slice107 1427 | case0040_slice108 1428 | case0040_slice109 1429 | case0040_slice110 1430 | case0040_slice111 1431 | case0040_slice112 1432 | case0040_slice113 1433 | case0040_slice114 1434 | case0040_slice115 1435 | case0040_slice116 1436 | case0040_slice117 1437 | case0040_slice118 1438 | case0040_slice119 1439 | case0040_slice120 1440 | case0040_slice121 1441 | case0040_slice122 1442 | case0040_slice123 1443 | case0040_slice124 1444 | case0040_slice125 1445 | case0040_slice126 1446 | case0040_slice127 1447 | case0040_slice128 1448 | case0040_slice129 1449 | case0040_slice130 1450 | case0040_slice131 1451 | case0040_slice132 1452 | case0040_slice133 1453 | case0040_slice134 1454 | case0040_slice135 1455 | case0040_slice136 1456 | case0040_slice137 1457 | case0040_slice138 1458 | case0040_slice139 1459 | case0040_slice140 1460 | case0040_slice141 1461 | case0040_slice142 1462 | case0040_slice143 1463 | case0040_slice144 1464 | case0040_slice145 1465 | case0040_slice146 1466 | case0040_slice147 1467 | case0040_slice148 1468 | case0040_slice149 1469 | case0040_slice150 1470 | case0040_slice151 1471 | case0040_slice152 1472 | case0040_slice153 1473 | case0040_slice154 1474 | case0040_slice155 1475 | case0040_slice156 1476 | case0040_slice157 1477 | case0040_slice158 1478 | case0040_slice159 1479 | case0040_slice160 1480 | case0040_slice161 1481 | case0040_slice162 1482 | case0040_slice163 1483 | case0040_slice164 1484 | case0040_slice165 1485 | case0040_slice166 1486 | case0040_slice167 1487 | case0040_slice168 1488 | case0040_slice169 1489 | case0040_slice170 1490 | case0040_slice171 1491 | case0040_slice172 1492 | case0040_slice173 1493 | case0040_slice174 1494 | case0040_slice175 1495 | case0040_slice176 1496 | case0040_slice177 1497 | case0040_slice178 1498 | case0040_slice179 1499 | 
case0040_slice180 1500 | case0040_slice181 1501 | case0040_slice182 1502 | case0040_slice183 1503 | case0040_slice184 1504 | case0040_slice185 1505 | case0040_slice186 1506 | case0040_slice187 1507 | case0040_slice188 1508 | case0040_slice189 1509 | case0040_slice190 1510 | case0040_slice191 1511 | case0040_slice192 1512 | case0040_slice193 1513 | case0040_slice194 1514 | case0010_slice000 1515 | case0010_slice001 1516 | case0010_slice002 1517 | case0010_slice003 1518 | case0010_slice004 1519 | case0010_slice005 1520 | case0010_slice006 1521 | case0010_slice007 1522 | case0010_slice008 1523 | case0010_slice009 1524 | case0010_slice010 1525 | case0010_slice011 1526 | case0010_slice012 1527 | case0010_slice013 1528 | case0010_slice014 1529 | case0010_slice015 1530 | case0010_slice016 1531 | case0010_slice017 1532 | case0010_slice018 1533 | case0010_slice019 1534 | case0010_slice020 1535 | case0010_slice021 1536 | case0010_slice022 1537 | case0010_slice023 1538 | case0010_slice024 1539 | case0010_slice025 1540 | case0010_slice026 1541 | case0010_slice027 1542 | case0010_slice028 1543 | case0010_slice029 1544 | case0010_slice030 1545 | case0010_slice031 1546 | case0010_slice032 1547 | case0010_slice033 1548 | case0010_slice034 1549 | case0010_slice035 1550 | case0010_slice036 1551 | case0010_slice037 1552 | case0010_slice038 1553 | case0010_slice039 1554 | case0010_slice040 1555 | case0010_slice041 1556 | case0010_slice042 1557 | case0010_slice043 1558 | case0010_slice044 1559 | case0010_slice045 1560 | case0010_slice046 1561 | case0010_slice047 1562 | case0010_slice048 1563 | case0010_slice049 1564 | case0010_slice050 1565 | case0010_slice051 1566 | case0010_slice052 1567 | case0010_slice053 1568 | case0010_slice054 1569 | case0010_slice055 1570 | case0010_slice056 1571 | case0010_slice057 1572 | case0010_slice058 1573 | case0010_slice059 1574 | case0010_slice060 1575 | case0010_slice061 1576 | case0010_slice062 1577 | case0010_slice063 1578 | case0010_slice064 1579 | case0010_slice065 1580 | case0010_slice066 1581 | case0010_slice067 1582 | case0010_slice068 1583 | case0010_slice069 1584 | case0010_slice070 1585 | case0010_slice071 1586 | case0010_slice072 1587 | case0010_slice073 1588 | case0010_slice074 1589 | case0010_slice075 1590 | case0010_slice076 1591 | case0010_slice077 1592 | case0010_slice078 1593 | case0010_slice079 1594 | case0010_slice080 1595 | case0010_slice081 1596 | case0010_slice082 1597 | case0010_slice083 1598 | case0010_slice084 1599 | case0010_slice085 1600 | case0010_slice086 1601 | case0010_slice087 1602 | case0010_slice088 1603 | case0010_slice089 1604 | case0010_slice090 1605 | case0010_slice091 1606 | case0010_slice092 1607 | case0010_slice093 1608 | case0010_slice094 1609 | case0010_slice095 1610 | case0010_slice096 1611 | case0010_slice097 1612 | case0010_slice098 1613 | case0010_slice099 1614 | case0010_slice100 1615 | case0010_slice101 1616 | case0010_slice102 1617 | case0010_slice103 1618 | case0010_slice104 1619 | case0010_slice105 1620 | case0010_slice106 1621 | case0010_slice107 1622 | case0010_slice108 1623 | case0010_slice109 1624 | case0010_slice110 1625 | case0010_slice111 1626 | case0010_slice112 1627 | case0010_slice113 1628 | case0010_slice114 1629 | case0010_slice115 1630 | case0010_slice116 1631 | case0010_slice117 1632 | case0010_slice118 1633 | case0010_slice119 1634 | case0010_slice120 1635 | case0010_slice121 1636 | case0010_slice122 1637 | case0010_slice123 1638 | case0010_slice124 1639 | case0010_slice125 1640 | case0010_slice126 1641 | 
case0010_slice127 1642 | case0010_slice128 1643 | case0010_slice129 1644 | case0010_slice130 1645 | case0010_slice131 1646 | case0010_slice132 1647 | case0010_slice133 1648 | case0010_slice134 1649 | case0010_slice135 1650 | case0010_slice136 1651 | case0010_slice137 1652 | case0010_slice138 1653 | case0010_slice139 1654 | case0010_slice140 1655 | case0010_slice141 1656 | case0010_slice142 1657 | case0010_slice143 1658 | case0010_slice144 1659 | case0010_slice145 1660 | case0010_slice146 1661 | case0010_slice147 1662 | case0021_slice000 1663 | case0021_slice001 1664 | case0021_slice002 1665 | case0021_slice003 1666 | case0021_slice004 1667 | case0021_slice005 1668 | case0021_slice006 1669 | case0021_slice007 1670 | case0021_slice008 1671 | case0021_slice009 1672 | case0021_slice010 1673 | case0021_slice011 1674 | case0021_slice012 1675 | case0021_slice013 1676 | case0021_slice014 1677 | case0021_slice015 1678 | case0021_slice016 1679 | case0021_slice017 1680 | case0021_slice018 1681 | case0021_slice019 1682 | case0021_slice020 1683 | case0021_slice021 1684 | case0021_slice022 1685 | case0021_slice023 1686 | case0021_slice024 1687 | case0021_slice025 1688 | case0021_slice026 1689 | case0021_slice027 1690 | case0021_slice028 1691 | case0021_slice029 1692 | case0021_slice030 1693 | case0021_slice031 1694 | case0021_slice032 1695 | case0021_slice033 1696 | case0021_slice034 1697 | case0021_slice035 1698 | case0021_slice036 1699 | case0021_slice037 1700 | case0021_slice038 1701 | case0021_slice039 1702 | case0021_slice040 1703 | case0021_slice041 1704 | case0021_slice042 1705 | case0021_slice043 1706 | case0021_slice044 1707 | case0021_slice045 1708 | case0021_slice046 1709 | case0021_slice047 1710 | case0021_slice048 1711 | case0021_slice049 1712 | case0021_slice050 1713 | case0021_slice051 1714 | case0021_slice052 1715 | case0021_slice053 1716 | case0021_slice054 1717 | case0021_slice055 1718 | case0021_slice056 1719 | case0021_slice057 1720 | case0021_slice058 1721 | case0021_slice059 1722 | case0021_slice060 1723 | case0021_slice061 1724 | case0021_slice062 1725 | case0021_slice063 1726 | case0021_slice064 1727 | case0021_slice065 1728 | case0021_slice066 1729 | case0021_slice067 1730 | case0021_slice068 1731 | case0021_slice069 1732 | case0021_slice070 1733 | case0021_slice071 1734 | case0021_slice072 1735 | case0021_slice073 1736 | case0021_slice074 1737 | case0021_slice075 1738 | case0021_slice076 1739 | case0021_slice077 1740 | case0021_slice078 1741 | case0021_slice079 1742 | case0021_slice080 1743 | case0021_slice081 1744 | case0021_slice082 1745 | case0021_slice083 1746 | case0021_slice084 1747 | case0021_slice085 1748 | case0021_slice086 1749 | case0021_slice087 1750 | case0021_slice088 1751 | case0021_slice089 1752 | case0021_slice090 1753 | case0021_slice091 1754 | case0021_slice092 1755 | case0021_slice093 1756 | case0021_slice094 1757 | case0021_slice095 1758 | case0021_slice096 1759 | case0021_slice097 1760 | case0021_slice098 1761 | case0021_slice099 1762 | case0021_slice100 1763 | case0021_slice101 1764 | case0021_slice102 1765 | case0021_slice103 1766 | case0021_slice104 1767 | case0021_slice105 1768 | case0021_slice106 1769 | case0021_slice107 1770 | case0021_slice108 1771 | case0021_slice109 1772 | case0021_slice110 1773 | case0021_slice111 1774 | case0021_slice112 1775 | case0021_slice113 1776 | case0021_slice114 1777 | case0021_slice115 1778 | case0021_slice116 1779 | case0021_slice117 1780 | case0021_slice118 1781 | case0021_slice119 1782 | case0021_slice120 1783 | 
case0021_slice121 1784 | case0021_slice122 1785 | case0021_slice123 1786 | case0021_slice124 1787 | case0021_slice125 1788 | case0021_slice126 1789 | case0021_slice127 1790 | case0021_slice128 1791 | case0021_slice129 1792 | case0021_slice130 1793 | case0021_slice131 1794 | case0021_slice132 1795 | case0021_slice133 1796 | case0021_slice134 1797 | case0021_slice135 1798 | case0021_slice136 1799 | case0021_slice137 1800 | case0021_slice138 1801 | case0021_slice139 1802 | case0021_slice140 1803 | case0021_slice141 1804 | case0021_slice142 1805 | case0006_slice000 1806 | case0006_slice001 1807 | case0006_slice002 1808 | case0006_slice003 1809 | case0006_slice004 1810 | case0006_slice005 1811 | case0006_slice006 1812 | case0006_slice007 1813 | case0006_slice008 1814 | case0006_slice009 1815 | case0006_slice010 1816 | case0006_slice011 1817 | case0006_slice012 1818 | case0006_slice013 1819 | case0006_slice014 1820 | case0006_slice015 1821 | case0006_slice016 1822 | case0006_slice017 1823 | case0006_slice018 1824 | case0006_slice019 1825 | case0006_slice020 1826 | case0006_slice021 1827 | case0006_slice022 1828 | case0006_slice023 1829 | case0006_slice024 1830 | case0006_slice025 1831 | case0006_slice026 1832 | case0006_slice027 1833 | case0006_slice028 1834 | case0006_slice029 1835 | case0006_slice030 1836 | case0006_slice031 1837 | case0006_slice032 1838 | case0006_slice033 1839 | case0006_slice034 1840 | case0006_slice035 1841 | case0006_slice036 1842 | case0006_slice037 1843 | case0006_slice038 1844 | case0006_slice039 1845 | case0006_slice040 1846 | case0006_slice041 1847 | case0006_slice042 1848 | case0006_slice043 1849 | case0006_slice044 1850 | case0006_slice045 1851 | case0006_slice046 1852 | case0006_slice047 1853 | case0006_slice048 1854 | case0006_slice049 1855 | case0006_slice050 1856 | case0006_slice051 1857 | case0006_slice052 1858 | case0006_slice053 1859 | case0006_slice054 1860 | case0006_slice055 1861 | case0006_slice056 1862 | case0006_slice057 1863 | case0006_slice058 1864 | case0006_slice059 1865 | case0006_slice060 1866 | case0006_slice061 1867 | case0006_slice062 1868 | case0006_slice063 1869 | case0006_slice064 1870 | case0006_slice065 1871 | case0006_slice066 1872 | case0006_slice067 1873 | case0006_slice068 1874 | case0006_slice069 1875 | case0006_slice070 1876 | case0006_slice071 1877 | case0006_slice072 1878 | case0006_slice073 1879 | case0006_slice074 1880 | case0006_slice075 1881 | case0006_slice076 1882 | case0006_slice077 1883 | case0006_slice078 1884 | case0006_slice079 1885 | case0006_slice080 1886 | case0006_slice081 1887 | case0006_slice082 1888 | case0006_slice083 1889 | case0006_slice084 1890 | case0006_slice085 1891 | case0006_slice086 1892 | case0006_slice087 1893 | case0006_slice088 1894 | case0006_slice089 1895 | case0006_slice090 1896 | case0006_slice091 1897 | case0006_slice092 1898 | case0006_slice093 1899 | case0006_slice094 1900 | case0006_slice095 1901 | case0006_slice096 1902 | case0006_slice097 1903 | case0006_slice098 1904 | case0006_slice099 1905 | case0006_slice100 1906 | case0006_slice101 1907 | case0006_slice102 1908 | case0006_slice103 1909 | case0006_slice104 1910 | case0006_slice105 1911 | case0006_slice106 1912 | case0006_slice107 1913 | case0006_slice108 1914 | case0006_slice109 1915 | case0006_slice110 1916 | case0006_slice111 1917 | case0006_slice112 1918 | case0006_slice113 1919 | case0006_slice114 1920 | case0006_slice115 1921 | case0006_slice116 1922 | case0006_slice117 1923 | case0006_slice118 1924 | case0006_slice119 1925 | 
case0006_slice120 1926 | case0006_slice121 1927 | case0006_slice122 1928 | case0006_slice123 1929 | case0006_slice124 1930 | case0006_slice125 1931 | case0006_slice126 1932 | case0006_slice127 1933 | case0006_slice128 1934 | case0006_slice129 1935 | case0006_slice130 1936 | case0027_slice000 1937 | case0027_slice001 1938 | case0027_slice002 1939 | case0027_slice003 1940 | case0027_slice004 1941 | case0027_slice005 1942 | case0027_slice006 1943 | case0027_slice007 1944 | case0027_slice008 1945 | case0027_slice009 1946 | case0027_slice010 1947 | case0027_slice011 1948 | case0027_slice012 1949 | case0027_slice013 1950 | case0027_slice014 1951 | case0027_slice015 1952 | case0027_slice016 1953 | case0027_slice017 1954 | case0027_slice018 1955 | case0027_slice019 1956 | case0027_slice020 1957 | case0027_slice021 1958 | case0027_slice022 1959 | case0027_slice023 1960 | case0027_slice024 1961 | case0027_slice025 1962 | case0027_slice026 1963 | case0027_slice027 1964 | case0027_slice028 1965 | case0027_slice029 1966 | case0027_slice030 1967 | case0027_slice031 1968 | case0027_slice032 1969 | case0027_slice033 1970 | case0027_slice034 1971 | case0027_slice035 1972 | case0027_slice036 1973 | case0027_slice037 1974 | case0027_slice038 1975 | case0027_slice039 1976 | case0027_slice040 1977 | case0027_slice041 1978 | case0027_slice042 1979 | case0027_slice043 1980 | case0027_slice044 1981 | case0027_slice045 1982 | case0027_slice046 1983 | case0027_slice047 1984 | case0027_slice048 1985 | case0027_slice049 1986 | case0027_slice050 1987 | case0027_slice051 1988 | case0027_slice052 1989 | case0027_slice053 1990 | case0027_slice054 1991 | case0027_slice055 1992 | case0027_slice056 1993 | case0027_slice057 1994 | case0027_slice058 1995 | case0027_slice059 1996 | case0027_slice060 1997 | case0027_slice061 1998 | case0027_slice062 1999 | case0027_slice063 2000 | case0027_slice064 2001 | case0027_slice065 2002 | case0027_slice066 2003 | case0027_slice067 2004 | case0027_slice068 2005 | case0027_slice069 2006 | case0027_slice070 2007 | case0027_slice071 2008 | case0027_slice072 2009 | case0027_slice073 2010 | case0027_slice074 2011 | case0027_slice075 2012 | case0027_slice076 2013 | case0027_slice077 2014 | case0027_slice078 2015 | case0027_slice079 2016 | case0027_slice080 2017 | case0027_slice081 2018 | case0027_slice082 2019 | case0027_slice083 2020 | case0027_slice084 2021 | case0027_slice085 2022 | case0027_slice086 2023 | case0027_slice087 2024 | case0028_slice000 2025 | case0028_slice001 2026 | case0028_slice002 2027 | case0028_slice003 2028 | case0028_slice004 2029 | case0028_slice005 2030 | case0028_slice006 2031 | case0028_slice007 2032 | case0028_slice008 2033 | case0028_slice009 2034 | case0028_slice010 2035 | case0028_slice011 2036 | case0028_slice012 2037 | case0028_slice013 2038 | case0028_slice014 2039 | case0028_slice015 2040 | case0028_slice016 2041 | case0028_slice017 2042 | case0028_slice018 2043 | case0028_slice019 2044 | case0028_slice020 2045 | case0028_slice021 2046 | case0028_slice022 2047 | case0028_slice023 2048 | case0028_slice024 2049 | case0028_slice025 2050 | case0028_slice026 2051 | case0028_slice027 2052 | case0028_slice028 2053 | case0028_slice029 2054 | case0028_slice030 2055 | case0028_slice031 2056 | case0028_slice032 2057 | case0028_slice033 2058 | case0028_slice034 2059 | case0028_slice035 2060 | case0028_slice036 2061 | case0028_slice037 2062 | case0028_slice038 2063 | case0028_slice039 2064 | case0028_slice040 2065 | case0028_slice041 2066 | case0028_slice042 2067 | 
case0028_slice043 2068 | case0028_slice044 2069 | case0028_slice045 2070 | case0028_slice046 2071 | case0028_slice047 2072 | case0028_slice048 2073 | case0028_slice049 2074 | case0028_slice050 2075 | case0028_slice051 2076 | case0028_slice052 2077 | case0028_slice053 2078 | case0028_slice054 2079 | case0028_slice055 2080 | case0028_slice056 2081 | case0028_slice057 2082 | case0028_slice058 2083 | case0028_slice059 2084 | case0028_slice060 2085 | case0028_slice061 2086 | case0028_slice062 2087 | case0028_slice063 2088 | case0028_slice064 2089 | case0028_slice065 2090 | case0028_slice066 2091 | case0028_slice067 2092 | case0028_slice068 2093 | case0028_slice069 2094 | case0028_slice070 2095 | case0028_slice071 2096 | case0028_slice072 2097 | case0028_slice073 2098 | case0028_slice074 2099 | case0028_slice075 2100 | case0028_slice076 2101 | case0028_slice077 2102 | case0028_slice078 2103 | case0028_slice079 2104 | case0028_slice080 2105 | case0028_slice081 2106 | case0028_slice082 2107 | case0028_slice083 2108 | case0028_slice084 2109 | case0028_slice085 2110 | case0028_slice086 2111 | case0028_slice087 2112 | case0028_slice088 2113 | case0037_slice000 2114 | case0037_slice001 2115 | case0037_slice002 2116 | case0037_slice003 2117 | case0037_slice004 2118 | case0037_slice005 2119 | case0037_slice006 2120 | case0037_slice007 2121 | case0037_slice008 2122 | case0037_slice009 2123 | case0037_slice010 2124 | case0037_slice011 2125 | case0037_slice012 2126 | case0037_slice013 2127 | case0037_slice014 2128 | case0037_slice015 2129 | case0037_slice016 2130 | case0037_slice017 2131 | case0037_slice018 2132 | case0037_slice019 2133 | case0037_slice020 2134 | case0037_slice021 2135 | case0037_slice022 2136 | case0037_slice023 2137 | case0037_slice024 2138 | case0037_slice025 2139 | case0037_slice026 2140 | case0037_slice027 2141 | case0037_slice028 2142 | case0037_slice029 2143 | case0037_slice030 2144 | case0037_slice031 2145 | case0037_slice032 2146 | case0037_slice033 2147 | case0037_slice034 2148 | case0037_slice035 2149 | case0037_slice036 2150 | case0037_slice037 2151 | case0037_slice038 2152 | case0037_slice039 2153 | case0037_slice040 2154 | case0037_slice041 2155 | case0037_slice042 2156 | case0037_slice043 2157 | case0037_slice044 2158 | case0037_slice045 2159 | case0037_slice046 2160 | case0037_slice047 2161 | case0037_slice048 2162 | case0037_slice049 2163 | case0037_slice050 2164 | case0037_slice051 2165 | case0037_slice052 2166 | case0037_slice053 2167 | case0037_slice054 2168 | case0037_slice055 2169 | case0037_slice056 2170 | case0037_slice057 2171 | case0037_slice058 2172 | case0037_slice059 2173 | case0037_slice060 2174 | case0037_slice061 2175 | case0037_slice062 2176 | case0037_slice063 2177 | case0037_slice064 2178 | case0037_slice065 2179 | case0037_slice066 2180 | case0037_slice067 2181 | case0037_slice068 2182 | case0037_slice069 2183 | case0037_slice070 2184 | case0037_slice071 2185 | case0037_slice072 2186 | case0037_slice073 2187 | case0037_slice074 2188 | case0037_slice075 2189 | case0037_slice076 2190 | case0037_slice077 2191 | case0037_slice078 2192 | case0037_slice079 2193 | case0037_slice080 2194 | case0037_slice081 2195 | case0037_slice082 2196 | case0037_slice083 2197 | case0037_slice084 2198 | case0037_slice085 2199 | case0037_slice086 2200 | case0037_slice087 2201 | case0037_slice088 2202 | case0037_slice089 2203 | case0037_slice090 2204 | case0037_slice091 2205 | case0037_slice092 2206 | case0037_slice093 2207 | case0037_slice094 2208 | case0037_slice095 2209 | 
case0037_slice096 2210 | case0037_slice097 2211 | case0037_slice098 2212 | -------------------------------------------------------------------------------- /networks/vit_seg_configs.py: -------------------------------------------------------------------------------- 1 | import ml_collections 2 | 3 | def get_b16_config(): 4 | """Returns the ViT-B/16 configuration.""" 5 | config = ml_collections.ConfigDict() 6 | config.patches = ml_collections.ConfigDict({'size': (16, 16)}) 7 | config.hidden_size = 768 8 | config.transformer = ml_collections.ConfigDict() 9 | config.transformer.mlp_dim = 3072 10 | config.transformer.num_heads = 12 11 | config.transformer.num_layers = 12 12 | config.transformer.attention_dropout_rate = 0.0 13 | config.transformer.dropout_rate = 0.1 14 | 15 | config.classifier = 'seg' 16 | config.representation_size = None 17 | config.resnet_pretrained_path = None 18 | config.pretrained_path = '../model/vit_checkpoint/imagenet21k/ViT-B_16.npz' 19 | config.patch_size = 16 20 | 21 | config.decoder_channels = (256, 128, 64, 16) 22 | config.n_classes = 2 23 | config.activation = 'softmax' 24 | return config 25 | 26 | 27 | def get_testing(): 28 | """Returns a minimal configuration for testing.""" 29 | config = ml_collections.ConfigDict() 30 | config.patches = ml_collections.ConfigDict({'size': (16, 16)}) 31 | config.hidden_size = 1 32 | config.transformer = ml_collections.ConfigDict() 33 | config.transformer.mlp_dim = 1 34 | config.transformer.num_heads = 1 35 | config.transformer.num_layers = 1 36 | config.transformer.attention_dropout_rate = 0.0 37 | config.transformer.dropout_rate = 0.1 38 | config.classifier = 'token' 39 | config.representation_size = None 40 | return config 41 | 42 | def get_r50_b16_config(): 43 | """Returns the Resnet50 + ViT-B/16 configuration.""" 44 | config = get_b16_config() 45 | config.patches.grid = (16, 16) 46 | config.resnet = ml_collections.ConfigDict() 47 | config.resnet.num_layers = (3, 4, 9) 48 | config.resnet.width_factor = 1 49 | 50 | config.classifier = 'seg' 51 | config.pretrained_path = '../model/vit_checkpoint/imagenet21k/R50+ViT-B_16.npz' 52 | config.decoder_channels = (256, 128, 64, 16) 53 | config.skip_channels = [512, 256, 64, 16] 54 | config.n_classes = 2 55 | config.n_skip = 3 56 | config.activation = 'softmax' 57 | 58 | return config 59 | 60 | 61 | def get_b32_config(): 62 | """Returns the ViT-B/32 configuration.""" 63 | config = get_b16_config() 64 | config.patches.size = (32, 32) 65 | config.pretrained_path = '../model/vit_checkpoint/imagenet21k/ViT-B_32.npz' 66 | return config 67 | 68 | 69 | def get_l16_config(): 70 | """Returns the ViT-L/16 configuration.""" 71 | config = ml_collections.ConfigDict() 72 | config.patches = ml_collections.ConfigDict({'size': (16, 16)}) 73 | config.hidden_size = 1024 74 | config.transformer = ml_collections.ConfigDict() 75 | config.transformer.mlp_dim = 4096 76 | config.transformer.num_heads = 16 77 | config.transformer.num_layers = 24 78 | config.transformer.attention_dropout_rate = 0.0 79 | config.transformer.dropout_rate = 0.1 80 | config.representation_size = None 81 | 82 | # custom 83 | config.classifier = 'seg' 84 | config.resnet_pretrained_path = None 85 | config.pretrained_path = '../model/vit_checkpoint/imagenet21k/ViT-L_16.npz' 86 | config.decoder_channels = (256, 128, 64, 16) 87 | config.n_classes = 2 88 | config.activation = 'softmax' 89 | return config 90 | 91 | 92 | def get_r50_l16_config(): 93 | """Returns the Resnet50 + ViT-L/16 configuration. 
customized """ 94 | config = get_l16_config() 95 | config.patches.grid = (16, 16) 96 | config.resnet = ml_collections.ConfigDict() 97 | config.resnet.num_layers = (3, 4, 9) 98 | config.resnet.width_factor = 1 99 | 100 | config.classifier = 'seg' 101 | config.resnet_pretrained_path = '../model/vit_checkpoint/imagenet21k/R50+ViT-B_16.npz' 102 | config.decoder_channels = (256, 128, 64, 16) 103 | config.skip_channels = [512, 256, 64, 16] 104 | config.n_classes = 2 105 | config.activation = 'softmax' 106 | return config 107 | 108 | 109 | def get_l32_config(): 110 | """Returns the ViT-L/32 configuration.""" 111 | config = get_l16_config() 112 | config.patches.size = (32, 32) 113 | return config 114 | 115 | 116 | def get_h14_config(): 117 | """Returns the ViT-L/16 configuration.""" 118 | config = ml_collections.ConfigDict() 119 | config.patches = ml_collections.ConfigDict({'size': (14, 14)}) 120 | config.hidden_size = 1280 121 | config.transformer = ml_collections.ConfigDict() 122 | config.transformer.mlp_dim = 5120 123 | config.transformer.num_heads = 16 124 | config.transformer.num_layers = 32 125 | config.transformer.attention_dropout_rate = 0.0 126 | config.transformer.dropout_rate = 0.1 127 | config.classifier = 'token' 128 | config.representation_size = None 129 | 130 | return config 131 | -------------------------------------------------------------------------------- /networks/vit_seg_modeling.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | from __future__ import absolute_import 3 | from __future__ import division 4 | from __future__ import print_function 5 | 6 | import copy 7 | import logging 8 | import math 9 | 10 | from os.path import join as pjoin 11 | 12 | import torch 13 | import torch.nn as nn 14 | import numpy as np 15 | 16 | from torch.nn import CrossEntropyLoss, Dropout, Softmax, Linear, Conv2d, LayerNorm 17 | from torch.nn.modules.utils import _pair 18 | from scipy import ndimage 19 | from . 
import vit_seg_configs as configs 20 | from .vit_seg_modeling_resnet_skip import ResNetV2 21 | 22 | 23 | logger = logging.getLogger(__name__) 24 | 25 | 26 | ATTENTION_Q = "MultiHeadDotProductAttention_1/query" 27 | ATTENTION_K = "MultiHeadDotProductAttention_1/key" 28 | ATTENTION_V = "MultiHeadDotProductAttention_1/value" 29 | ATTENTION_OUT = "MultiHeadDotProductAttention_1/out" 30 | FC_0 = "MlpBlock_3/Dense_0" 31 | FC_1 = "MlpBlock_3/Dense_1" 32 | ATTENTION_NORM = "LayerNorm_0" 33 | MLP_NORM = "LayerNorm_2" 34 | 35 | 36 | def np2th(weights, conv=False): 37 | """Possibly convert HWIO to OIHW.""" 38 | if conv: 39 | weights = weights.transpose([3, 2, 0, 1]) 40 | return torch.from_numpy(weights) 41 | 42 | 43 | def swish(x): 44 | return x * torch.sigmoid(x) 45 | 46 | 47 | ACT2FN = {"gelu": torch.nn.functional.gelu, "relu": torch.nn.functional.relu, "swish": swish} 48 | 49 | 50 | class Attention(nn.Module): 51 | def __init__(self, config, vis): 52 | super(Attention, self).__init__() 53 | self.vis = vis 54 | self.num_attention_heads = config.transformer["num_heads"] 55 | self.attention_head_size = int(config.hidden_size / self.num_attention_heads) 56 | self.all_head_size = self.num_attention_heads * self.attention_head_size 57 | 58 | self.query = Linear(config.hidden_size, self.all_head_size) 59 | self.key = Linear(config.hidden_size, self.all_head_size) 60 | self.value = Linear(config.hidden_size, self.all_head_size) 61 | 62 | self.out = Linear(config.hidden_size, config.hidden_size) 63 | self.attn_dropout = Dropout(config.transformer["attention_dropout_rate"]) 64 | self.proj_dropout = Dropout(config.transformer["attention_dropout_rate"]) 65 | 66 | self.softmax = Softmax(dim=-1) 67 | 68 | def transpose_for_scores(self, x): 69 | new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) 70 | x = x.view(*new_x_shape) 71 | return x.permute(0, 2, 1, 3) 72 | 73 | def forward(self, hidden_states): 74 | mixed_query_layer = self.query(hidden_states) 75 | mixed_key_layer = self.key(hidden_states) 76 | mixed_value_layer = self.value(hidden_states) 77 | 78 | query_layer = self.transpose_for_scores(mixed_query_layer) 79 | key_layer = self.transpose_for_scores(mixed_key_layer) 80 | value_layer = self.transpose_for_scores(mixed_value_layer) 81 | 82 | attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2)) 83 | attention_scores = attention_scores / math.sqrt(self.attention_head_size) 84 | attention_probs = self.softmax(attention_scores) 85 | weights = attention_probs if self.vis else None 86 | attention_probs = self.attn_dropout(attention_probs) 87 | 88 | context_layer = torch.matmul(attention_probs, value_layer) 89 | context_layer = context_layer.permute(0, 2, 1, 3).contiguous() 90 | new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,) 91 | context_layer = context_layer.view(*new_context_layer_shape) 92 | attention_output = self.out(context_layer) 93 | attention_output = self.proj_dropout(attention_output) 94 | return attention_output, weights 95 | 96 | 97 | class Mlp(nn.Module): 98 | def __init__(self, config): 99 | super(Mlp, self).__init__() 100 | self.fc1 = Linear(config.hidden_size, config.transformer["mlp_dim"]) 101 | self.fc2 = Linear(config.transformer["mlp_dim"], config.hidden_size) 102 | self.act_fn = ACT2FN["gelu"] 103 | self.dropout = Dropout(config.transformer["dropout_rate"]) 104 | 105 | self._init_weights() 106 | 107 | def _init_weights(self): 108 | nn.init.xavier_uniform_(self.fc1.weight) 109 | 
nn.init.xavier_uniform_(self.fc2.weight) 110 | nn.init.normal_(self.fc1.bias, std=1e-6) 111 | nn.init.normal_(self.fc2.bias, std=1e-6) 112 | 113 | def forward(self, x): 114 | x = self.fc1(x) 115 | x = self.act_fn(x) 116 | x = self.dropout(x) 117 | x = self.fc2(x) 118 | x = self.dropout(x) 119 | return x 120 | 121 | 122 | class Embeddings(nn.Module): 123 | """Construct the embeddings from patch, position embeddings. 124 | """ 125 | def __init__(self, config, img_size, in_channels=3): 126 | super(Embeddings, self).__init__() 127 | self.hybrid = None 128 | self.config = config 129 | img_size = _pair(img_size) 130 | 131 | if config.patches.get("grid") is not None: # ResNet 132 | grid_size = config.patches["grid"] 133 | patch_size = (img_size[0] // 16 // grid_size[0], img_size[1] // 16 // grid_size[1]) 134 | patch_size_real = (patch_size[0] * 16, patch_size[1] * 16) 135 | n_patches = (img_size[0] // patch_size_real[0]) * (img_size[1] // patch_size_real[1]) 136 | self.hybrid = True 137 | else: 138 | patch_size = _pair(config.patches["size"]) 139 | n_patches = (img_size[0] // patch_size[0]) * (img_size[1] // patch_size[1]) 140 | self.hybrid = False 141 | 142 | if self.hybrid: 143 | self.hybrid_model = ResNetV2(block_units=config.resnet.num_layers, width_factor=config.resnet.width_factor) 144 | in_channels = self.hybrid_model.width * 16 145 | self.patch_embeddings = Conv2d(in_channels=in_channels, 146 | out_channels=config.hidden_size, 147 | kernel_size=patch_size, 148 | stride=patch_size) 149 | self.position_embeddings = nn.Parameter(torch.zeros(1, n_patches, config.hidden_size)) 150 | 151 | self.dropout = Dropout(config.transformer["dropout_rate"]) 152 | 153 | 154 | def forward(self, x): 155 | if self.hybrid: 156 | x, features = self.hybrid_model(x) 157 | else: 158 | features = None 159 | x = self.patch_embeddings(x) # (B, hidden. 
n_patches^(1/2), n_patches^(1/2)) 160 | x = x.flatten(2) 161 | x = x.transpose(-1, -2) # (B, n_patches, hidden) 162 | 163 | embeddings = x + self.position_embeddings 164 | embeddings = self.dropout(embeddings) 165 | return embeddings, features 166 | 167 | 168 | class Block(nn.Module): 169 | def __init__(self, config, vis): 170 | super(Block, self).__init__() 171 | self.hidden_size = config.hidden_size 172 | self.attention_norm = LayerNorm(config.hidden_size, eps=1e-6) 173 | self.ffn_norm = LayerNorm(config.hidden_size, eps=1e-6) 174 | self.ffn = Mlp(config) 175 | self.attn = Attention(config, vis) 176 | 177 | def forward(self, x): 178 | h = x 179 | x = self.attention_norm(x) 180 | x, weights = self.attn(x) 181 | x = x + h 182 | 183 | h = x 184 | x = self.ffn_norm(x) 185 | x = self.ffn(x) 186 | x = x + h 187 | return x, weights 188 | 189 | def load_from(self, weights, n_block): 190 | ROOT = f"Transformer/encoderblock_{n_block}" 191 | with torch.no_grad(): 192 | query_weight = np2th(weights[pjoin(ROOT, ATTENTION_Q, "kernel")]).view(self.hidden_size, self.hidden_size).t() 193 | key_weight = np2th(weights[pjoin(ROOT, ATTENTION_K, "kernel")]).view(self.hidden_size, self.hidden_size).t() 194 | value_weight = np2th(weights[pjoin(ROOT, ATTENTION_V, "kernel")]).view(self.hidden_size, self.hidden_size).t() 195 | out_weight = np2th(weights[pjoin(ROOT, ATTENTION_OUT, "kernel")]).view(self.hidden_size, self.hidden_size).t() 196 | 197 | query_bias = np2th(weights[pjoin(ROOT, ATTENTION_Q, "bias")]).view(-1) 198 | key_bias = np2th(weights[pjoin(ROOT, ATTENTION_K, "bias")]).view(-1) 199 | value_bias = np2th(weights[pjoin(ROOT, ATTENTION_V, "bias")]).view(-1) 200 | out_bias = np2th(weights[pjoin(ROOT, ATTENTION_OUT, "bias")]).view(-1) 201 | 202 | self.attn.query.weight.copy_(query_weight) 203 | self.attn.key.weight.copy_(key_weight) 204 | self.attn.value.weight.copy_(value_weight) 205 | self.attn.out.weight.copy_(out_weight) 206 | self.attn.query.bias.copy_(query_bias) 207 | self.attn.key.bias.copy_(key_bias) 208 | self.attn.value.bias.copy_(value_bias) 209 | self.attn.out.bias.copy_(out_bias) 210 | 211 | mlp_weight_0 = np2th(weights[pjoin(ROOT, FC_0, "kernel")]).t() 212 | mlp_weight_1 = np2th(weights[pjoin(ROOT, FC_1, "kernel")]).t() 213 | mlp_bias_0 = np2th(weights[pjoin(ROOT, FC_0, "bias")]).t() 214 | mlp_bias_1 = np2th(weights[pjoin(ROOT, FC_1, "bias")]).t() 215 | 216 | self.ffn.fc1.weight.copy_(mlp_weight_0) 217 | self.ffn.fc2.weight.copy_(mlp_weight_1) 218 | self.ffn.fc1.bias.copy_(mlp_bias_0) 219 | self.ffn.fc2.bias.copy_(mlp_bias_1) 220 | 221 | self.attention_norm.weight.copy_(np2th(weights[pjoin(ROOT, ATTENTION_NORM, "scale")])) 222 | self.attention_norm.bias.copy_(np2th(weights[pjoin(ROOT, ATTENTION_NORM, "bias")])) 223 | self.ffn_norm.weight.copy_(np2th(weights[pjoin(ROOT, MLP_NORM, "scale")])) 224 | self.ffn_norm.bias.copy_(np2th(weights[pjoin(ROOT, MLP_NORM, "bias")])) 225 | 226 | 227 | class Encoder(nn.Module): 228 | def __init__(self, config, vis): 229 | super(Encoder, self).__init__() 230 | self.vis = vis 231 | self.layer = nn.ModuleList() 232 | self.encoder_norm = LayerNorm(config.hidden_size, eps=1e-6) 233 | for _ in range(config.transformer["num_layers"]): 234 | layer = Block(config, vis) 235 | self.layer.append(copy.deepcopy(layer)) 236 | 237 | def forward(self, hidden_states): 238 | attn_weights = [] 239 | for layer_block in self.layer: 240 | hidden_states, weights = layer_block(hidden_states) 241 | if self.vis: 242 | attn_weights.append(weights) 243 | encoded = 
self.encoder_norm(hidden_states) 244 | return encoded, attn_weights 245 | 246 | 247 | class Transformer(nn.Module): 248 | def __init__(self, config, img_size, vis): 249 | super(Transformer, self).__init__() 250 | self.embeddings = Embeddings(config, img_size=img_size) 251 | self.encoder = Encoder(config, vis) 252 | 253 | def forward(self, input_ids): 254 | embedding_output, features = self.embeddings(input_ids) 255 | encoded, attn_weights = self.encoder(embedding_output) # (B, n_patch, hidden) 256 | return encoded, attn_weights, features 257 | 258 | 259 | class Conv2dReLU(nn.Sequential): 260 | def __init__( 261 | self, 262 | in_channels, 263 | out_channels, 264 | kernel_size, 265 | padding=0, 266 | stride=1, 267 | use_batchnorm=True, 268 | ): 269 | conv = nn.Conv2d( 270 | in_channels, 271 | out_channels, 272 | kernel_size, 273 | stride=stride, 274 | padding=padding, 275 | bias=not (use_batchnorm), 276 | ) 277 | relu = nn.ReLU(inplace=True) 278 | 279 | bn = nn.BatchNorm2d(out_channels) 280 | 281 | super(Conv2dReLU, self).__init__(conv, bn, relu) 282 | 283 | 284 | class DecoderBlock(nn.Module): 285 | def __init__( 286 | self, 287 | in_channels, 288 | out_channels, 289 | skip_channels=0, 290 | use_batchnorm=True, 291 | ): 292 | super().__init__() 293 | self.conv1 = Conv2dReLU( 294 | in_channels + skip_channels, 295 | out_channels, 296 | kernel_size=3, 297 | padding=1, 298 | use_batchnorm=use_batchnorm, 299 | ) 300 | self.conv2 = Conv2dReLU( 301 | out_channels, 302 | out_channels, 303 | kernel_size=3, 304 | padding=1, 305 | use_batchnorm=use_batchnorm, 306 | ) 307 | self.up = nn.UpsamplingBilinear2d(scale_factor=2) 308 | 309 | def forward(self, x, skip=None): 310 | x = self.up(x) 311 | if skip is not None: 312 | x = torch.cat([x, skip], dim=1) 313 | x = self.conv1(x) 314 | x = self.conv2(x) 315 | return x 316 | 317 | 318 | class SegmentationHead(nn.Sequential): 319 | 320 | def __init__(self, in_channels, out_channels, kernel_size=3, upsampling=1): 321 | conv2d = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, padding=kernel_size // 2) 322 | upsampling = nn.UpsamplingBilinear2d(scale_factor=upsampling) if upsampling > 1 else nn.Identity() 323 | super().__init__(conv2d, upsampling) 324 | 325 | 326 | class DecoderCup(nn.Module): 327 | def __init__(self, config): 328 | super().__init__() 329 | self.config = config 330 | head_channels = 512 331 | self.conv_more = Conv2dReLU( 332 | config.hidden_size, 333 | head_channels, 334 | kernel_size=3, 335 | padding=1, 336 | use_batchnorm=True, 337 | ) 338 | decoder_channels = config.decoder_channels 339 | in_channels = [head_channels] + list(decoder_channels[:-1]) 340 | out_channels = decoder_channels 341 | 342 | if self.config.n_skip != 0: 343 | skip_channels = self.config.skip_channels 344 | for i in range(4-self.config.n_skip): # re-select the skip channels according to n_skip 345 | skip_channels[3-i]=0 346 | 347 | else: 348 | skip_channels=[0,0,0,0] 349 | 350 | blocks = [ 351 | DecoderBlock(in_ch, out_ch, sk_ch) for in_ch, out_ch, sk_ch in zip(in_channels, out_channels, skip_channels) 352 | ] 353 | self.blocks = nn.ModuleList(blocks) 354 | 355 | def forward(self, hidden_states, features=None): 356 | B, n_patch, hidden = hidden_states.size() # reshape from (B, n_patch, hidden) to (B, h, w, hidden) 357 | h, w = int(np.sqrt(n_patch)), int(np.sqrt(n_patch)) 358 | x = hidden_states.permute(0, 2, 1) 359 | x = x.contiguous().view(B, hidden, h, w) 360 | x = self.conv_more(x) 361 | for i, decoder_block in enumerate(self.blocks): 362 | if features is 
not None: 363 | skip = features[i] if (i < self.config.n_skip) else None 364 | else: 365 | skip = None 366 | x = decoder_block(x, skip=skip) 367 | return x 368 | 369 | 370 | class VisionTransformer(nn.Module): 371 | def __init__(self, config, img_size=224, num_classes=21843, zero_head=False, vis=False): 372 | super(VisionTransformer, self).__init__() 373 | self.num_classes = num_classes 374 | self.zero_head = zero_head 375 | self.classifier = config.classifier 376 | self.transformer = Transformer(config, img_size, vis) 377 | self.decoder = DecoderCup(config) 378 | self.segmentation_head = SegmentationHead( 379 | in_channels=config['decoder_channels'][-1], 380 | out_channels=config['n_classes'], 381 | kernel_size=3, 382 | ) 383 | self.config = config 384 | 385 | def forward(self, x): 386 | if x.size()[1] == 1: 387 | x = x.repeat(1,3,1,1) 388 | x, attn_weights, features = self.transformer(x) # (B, n_patch, hidden) 389 | x = self.decoder(x, features) 390 | logits = self.segmentation_head(x) 391 | return logits 392 | 393 | def load_from(self, weights): 394 | with torch.no_grad(): 395 | 396 | res_weight = weights 397 | self.transformer.embeddings.patch_embeddings.weight.copy_(np2th(weights["embedding/kernel"], conv=True)) 398 | self.transformer.embeddings.patch_embeddings.bias.copy_(np2th(weights["embedding/bias"])) 399 | 400 | self.transformer.encoder.encoder_norm.weight.copy_(np2th(weights["Transformer/encoder_norm/scale"])) 401 | self.transformer.encoder.encoder_norm.bias.copy_(np2th(weights["Transformer/encoder_norm/bias"])) 402 | 403 | posemb = np2th(weights["Transformer/posembed_input/pos_embedding"]) 404 | 405 | posemb_new = self.transformer.embeddings.position_embeddings 406 | if posemb.size() == posemb_new.size(): 407 | self.transformer.embeddings.position_embeddings.copy_(posemb) 408 | elif posemb.size()[1]-1 == posemb_new.size()[1]: 409 | posemb = posemb[:, 1:] 410 | self.transformer.embeddings.position_embeddings.copy_(posemb) 411 | else: 412 | logger.info("load_pretrained: resized variant: %s to %s" % (posemb.size(), posemb_new.size())) 413 | ntok_new = posemb_new.size(1) 414 | if self.classifier == "seg": 415 | _, posemb_grid = posemb[:, :1], posemb[0, 1:] 416 | gs_old = int(np.sqrt(len(posemb_grid))) 417 | gs_new = int(np.sqrt(ntok_new)) 418 | print('load_pretrained: grid-size from %s to %s' % (gs_old, gs_new)) 419 | posemb_grid = posemb_grid.reshape(gs_old, gs_old, -1) 420 | zoom = (gs_new / gs_old, gs_new / gs_old, 1) 421 | posemb_grid = ndimage.zoom(posemb_grid, zoom, order=1) # th2np 422 | posemb_grid = posemb_grid.reshape(1, gs_new * gs_new, -1) 423 | posemb = posemb_grid 424 | self.transformer.embeddings.position_embeddings.copy_(np2th(posemb)) 425 | 426 | # Encoder whole 427 | for bname, block in self.transformer.encoder.named_children(): 428 | for uname, unit in block.named_children(): 429 | unit.load_from(weights, n_block=uname) 430 | 431 | if self.transformer.embeddings.hybrid: 432 | self.transformer.embeddings.hybrid_model.root.conv.weight.copy_(np2th(res_weight["conv_root/kernel"], conv=True)) 433 | gn_weight = np2th(res_weight["gn_root/scale"]).view(-1) 434 | gn_bias = np2th(res_weight["gn_root/bias"]).view(-1) 435 | self.transformer.embeddings.hybrid_model.root.gn.weight.copy_(gn_weight) 436 | self.transformer.embeddings.hybrid_model.root.gn.bias.copy_(gn_bias) 437 | 438 | for bname, block in self.transformer.embeddings.hybrid_model.body.named_children(): 439 | for uname, unit in block.named_children(): 440 | unit.load_from(res_weight, n_block=bname, n_unit=uname) 
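# Usage sketch (illustrative only, not part of the original file): CONFIGS
# below maps model names to config builders, and load_from() above copies the
# .npz (JAX-format) pretrained weights into the PyTorch parameters. Assuming
# a checkpoint exists at config.pretrained_path, usage mirrors train.py:
#
#     import numpy as np
#     config = CONFIGS['R50-ViT-B_16']
#     config.n_classes = 9
#     config.n_skip = 3
#     config.patches.grid = (14, 14)  # img_size 224 // vit_patches_size 16
#     model = VisionTransformer(config, img_size=224, num_classes=config.n_classes)
#     model.load_from(weights=np.load(config.pretrained_path))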
441 | 442 | CONFIGS = { 443 | 'ViT-B_16': configs.get_b16_config(), 444 | 'ViT-B_32': configs.get_b32_config(), 445 | 'ViT-L_16': configs.get_l16_config(), 446 | 'ViT-L_32': configs.get_l32_config(), 447 | 'ViT-H_14': configs.get_h14_config(), 448 | 'R50-ViT-B_16': configs.get_r50_b16_config(), 449 | 'R50-ViT-L_16': configs.get_r50_l16_config(), 450 | 'testing': configs.get_testing(), 451 | } 452 | 453 | 454 | -------------------------------------------------------------------------------- /networks/vit_seg_modeling_resnet_skip.py: -------------------------------------------------------------------------------- 1 | import math 2 | 3 | from os.path import join as pjoin 4 | from collections import OrderedDict 5 | 6 | import torch 7 | import torch.nn as nn 8 | import torch.nn.functional as F 9 | 10 | 11 | def np2th(weights, conv=False): 12 | """Possibly convert HWIO to OIHW.""" 13 | if conv: 14 | weights = weights.transpose([3, 2, 0, 1]) 15 | return torch.from_numpy(weights) 16 | 17 | 18 | class StdConv2d(nn.Conv2d): 19 | 20 | def forward(self, x): 21 | w = self.weight 22 | v, m = torch.var_mean(w, dim=[1, 2, 3], keepdim=True, unbiased=False) 23 | w = (w - m) / torch.sqrt(v + 1e-5) 24 | return F.conv2d(x, w, self.bias, self.stride, self.padding, 25 | self.dilation, self.groups) 26 | 27 | 28 | def conv3x3(cin, cout, stride=1, groups=1, bias=False): 29 | return StdConv2d(cin, cout, kernel_size=3, stride=stride, 30 | padding=1, bias=bias, groups=groups) 31 | 32 | 33 | def conv1x1(cin, cout, stride=1, bias=False): 34 | return StdConv2d(cin, cout, kernel_size=1, stride=stride, 35 | padding=0, bias=bias) 36 | 37 | 38 | class PreActBottleneck(nn.Module): 39 | """Pre-activation (v2) bottleneck block. 40 | """ 41 | 42 | def __init__(self, cin, cout=None, cmid=None, stride=1): 43 | super().__init__() 44 | cout = cout or cin 45 | cmid = cmid or cout//4 46 | 47 | self.gn1 = nn.GroupNorm(32, cmid, eps=1e-6) 48 | self.conv1 = conv1x1(cin, cmid, bias=False) 49 | self.gn2 = nn.GroupNorm(32, cmid, eps=1e-6) 50 | self.conv2 = conv3x3(cmid, cmid, stride, bias=False) # Original code has it on conv1!! 51 | self.gn3 = nn.GroupNorm(32, cout, eps=1e-6) 52 | self.conv3 = conv1x1(cmid, cout, bias=False) 53 | self.relu = nn.ReLU(inplace=True) 54 | 55 | if (stride != 1 or cin != cout): 56 | # Projection also with pre-activation according to paper. 
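# Illustrative note: this projection shortcut exists only when the stride or
# the channel count changes; forward() checks hasattr(self, 'downsample') to
# decide whether the residual branch must pass through the 1x1 conv + GroupNorm.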
57 | self.downsample = conv1x1(cin, cout, stride, bias=False) 58 | self.gn_proj = nn.GroupNorm(cout, cout) 59 | 60 | def forward(self, x): 61 | 62 | # Residual branch 63 | residual = x 64 | if hasattr(self, 'downsample'): 65 | residual = self.downsample(x) 66 | residual = self.gn_proj(residual) 67 | 68 | # Unit's branch 69 | y = self.relu(self.gn1(self.conv1(x))) 70 | y = self.relu(self.gn2(self.conv2(y))) 71 | y = self.gn3(self.conv3(y)) 72 | 73 | y = self.relu(residual + y) 74 | return y 75 | 76 | def load_from(self, weights, n_block, n_unit): 77 | conv1_weight = np2th(weights[pjoin(n_block, n_unit, "conv1/kernel")], conv=True) 78 | conv2_weight = np2th(weights[pjoin(n_block, n_unit, "conv2/kernel")], conv=True) 79 | conv3_weight = np2th(weights[pjoin(n_block, n_unit, "conv3/kernel")], conv=True) 80 | 81 | gn1_weight = np2th(weights[pjoin(n_block, n_unit, "gn1/scale")]) 82 | gn1_bias = np2th(weights[pjoin(n_block, n_unit, "gn1/bias")]) 83 | 84 | gn2_weight = np2th(weights[pjoin(n_block, n_unit, "gn2/scale")]) 85 | gn2_bias = np2th(weights[pjoin(n_block, n_unit, "gn2/bias")]) 86 | 87 | gn3_weight = np2th(weights[pjoin(n_block, n_unit, "gn3/scale")]) 88 | gn3_bias = np2th(weights[pjoin(n_block, n_unit, "gn3/bias")]) 89 | 90 | self.conv1.weight.copy_(conv1_weight) 91 | self.conv2.weight.copy_(conv2_weight) 92 | self.conv3.weight.copy_(conv3_weight) 93 | 94 | self.gn1.weight.copy_(gn1_weight.view(-1)) 95 | self.gn1.bias.copy_(gn1_bias.view(-1)) 96 | 97 | self.gn2.weight.copy_(gn2_weight.view(-1)) 98 | self.gn2.bias.copy_(gn2_bias.view(-1)) 99 | 100 | self.gn3.weight.copy_(gn3_weight.view(-1)) 101 | self.gn3.bias.copy_(gn3_bias.view(-1)) 102 | 103 | if hasattr(self, 'downsample'): 104 | proj_conv_weight = np2th(weights[pjoin(n_block, n_unit, "conv_proj/kernel")], conv=True) 105 | proj_gn_weight = np2th(weights[pjoin(n_block, n_unit, "gn_proj/scale")]) 106 | proj_gn_bias = np2th(weights[pjoin(n_block, n_unit, "gn_proj/bias")]) 107 | 108 | self.downsample.weight.copy_(proj_conv_weight) 109 | self.gn_proj.weight.copy_(proj_gn_weight.view(-1)) 110 | self.gn_proj.bias.copy_(proj_gn_bias.view(-1)) 111 | 112 | class ResNetV2(nn.Module): 113 | """Implementation of Pre-activation (v2) ResNet mode.""" 114 | 115 | def __init__(self, block_units, width_factor): 116 | super().__init__() 117 | width = int(64 * width_factor) 118 | self.width = width 119 | 120 | self.root = nn.Sequential(OrderedDict([ 121 | ('conv', StdConv2d(3, width, kernel_size=7, stride=2, bias=False, padding=3)), 122 | ('gn', nn.GroupNorm(32, width, eps=1e-6)), 123 | ('relu', nn.ReLU(inplace=True)), 124 | # ('pool', nn.MaxPool2d(kernel_size=3, stride=2, padding=0)) 125 | ])) 126 | 127 | self.body = nn.Sequential(OrderedDict([ 128 | ('block1', nn.Sequential(OrderedDict( 129 | [('unit1', PreActBottleneck(cin=width, cout=width*4, cmid=width))] + 130 | [(f'unit{i:d}', PreActBottleneck(cin=width*4, cout=width*4, cmid=width)) for i in range(2, block_units[0] + 1)], 131 | ))), 132 | ('block2', nn.Sequential(OrderedDict( 133 | [('unit1', PreActBottleneck(cin=width*4, cout=width*8, cmid=width*2, stride=2))] + 134 | [(f'unit{i:d}', PreActBottleneck(cin=width*8, cout=width*8, cmid=width*2)) for i in range(2, block_units[1] + 1)], 135 | ))), 136 | ('block3', nn.Sequential(OrderedDict( 137 | [('unit1', PreActBottleneck(cin=width*8, cout=width*16, cmid=width*4, stride=2))] + 138 | [(f'unit{i:d}', PreActBottleneck(cin=width*16, cout=width*16, cmid=width*4)) for i in range(2, block_units[2] + 1)], 139 | ))), 140 | ])) 141 | 142 | def forward(self, x): 
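# Illustrative note: returns (deepest feature map, skip features). Skips are
# collected after the root conv and after every body block except the last;
# if a stage output comes out slightly smaller than the expected
# in_size / 4 / (i + 1), it is zero-padded so the decoder's torch.cat with
# these skips cannot fail. The list is reversed (deepest first) on return.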
143 | features = [] 144 | b, c, in_size, _ = x.size() 145 | x = self.root(x) 146 | features.append(x) 147 | x = nn.MaxPool2d(kernel_size=3, stride=2, padding=0)(x) 148 | for i in range(len(self.body)-1): 149 | x = self.body[i](x) 150 | right_size = int(in_size / 4 / (i+1)) 151 | if x.size()[2] != right_size: 152 | pad = right_size - x.size()[2] 153 | assert pad < 3 and pad > 0, "x {} should {}".format(x.size(), right_size) 154 | feat = torch.zeros((b, x.size()[1], right_size, right_size), device=x.device) 155 | feat[:, :, 0:x.size()[2], 0:x.size()[3]] = x[:] 156 | else: 157 | feat = x 158 | features.append(feat) 159 | x = self.body[-1](x) 160 | return x, features[::-1] 161 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | torch==1.4.0 2 | torchvision==0.5.0 3 | numpy 4 | tqdm 5 | tensorboard 6 | tensorboardX 7 | ml-collections 8 | medpy 9 | SimpleITK 10 | scipy 11 | h5py 12 | -------------------------------------------------------------------------------- /test.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | import os 4 | import random 5 | import sys 6 | import numpy as np 7 | import torch 8 | import torch.backends.cudnn as cudnn 9 | import torch.nn as nn 10 | from torch.utils.data import DataLoader 11 | from tqdm import tqdm 12 | from datasets.dataset_synapse import Synapse_dataset 13 | from utils import test_single_volume 14 | from networks.vit_seg_modeling import VisionTransformer as ViT_seg 15 | from networks.vit_seg_modeling import CONFIGS as CONFIGS_ViT_seg 16 | 17 | parser = argparse.ArgumentParser() 18 | parser.add_argument('--volume_path', type=str, 19 | default='../data/Synapse/test_vol_h5', help='root dir for validation volume data') # for acdc volume_path=root_dir 20 | parser.add_argument('--dataset', type=str, 21 | default='Synapse', help='experiment_name') 22 | parser.add_argument('--num_classes', type=int, 23 | default=4, help='output channel of network') 24 | parser.add_argument('--list_dir', type=str, 25 | default='./lists/lists_Synapse', help='list dir') 26 | 27 | parser.add_argument('--max_iterations', type=int,default=20000, help='maximum epoch number to train') 28 | parser.add_argument('--max_epochs', type=int, default=30, help='maximum epoch number to train') 29 | parser.add_argument('--batch_size', type=int, default=24, 30 | help='batch_size per gpu') 31 | parser.add_argument('--img_size', type=int, default=224, help='input patch size of network input') 32 | parser.add_argument('--is_savenii', action="store_true", help='whether to save results during inference') 33 | 34 | parser.add_argument('--n_skip', type=int, default=3, help='using number of skip-connect, default is num') 35 | parser.add_argument('--vit_name', type=str, default='ViT-B_16', help='select one vit model') 36 | 37 | parser.add_argument('--test_save_dir', type=str, default='../predictions', help='saving prediction as nii!') 38 | parser.add_argument('--deterministic', type=int, default=1, help='whether use deterministic training') 39 | parser.add_argument('--base_lr', type=float, default=0.01, help='segmentation network learning rate') 40 | parser.add_argument('--seed', type=int, default=1234, help='random seed') 41 | parser.add_argument('--vit_patches_size', type=int, default=16, help='vit_patches_size, default is 16') 42 | args = parser.parse_args() 43 | 44 | 45 | def inference(args, 
model, test_save_path=None): 46 | db_test = args.Dataset(base_dir=args.volume_path, split="test_vol", list_dir=args.list_dir) 47 | testloader = DataLoader(db_test, batch_size=1, shuffle=False, num_workers=1) 48 | logging.info("{} test iterations per epoch".format(len(testloader))) 49 | model.eval() 50 | metric_list = 0.0 51 | for i_batch, sampled_batch in tqdm(enumerate(testloader)): 52 | h, w = sampled_batch["image"].size()[2:] 53 | image, label, case_name = sampled_batch["image"], sampled_batch["label"], sampled_batch['case_name'][0] 54 | metric_i = test_single_volume(image, label, model, classes=args.num_classes, patch_size=[args.img_size, args.img_size], 55 | test_save_path=test_save_path, case=case_name, z_spacing=args.z_spacing) 56 | metric_list += np.array(metric_i) 57 | logging.info('idx %d case %s mean_dice %f mean_hd95 %f' % (i_batch, case_name, np.mean(metric_i, axis=0)[0], np.mean(metric_i, axis=0)[1])) 58 | metric_list = metric_list / len(db_test) 59 | for i in range(1, args.num_classes): 60 | logging.info('Mean class %d mean_dice %f mean_hd95 %f' % (i, metric_list[i-1][0], metric_list[i-1][1])) 61 | performance = np.mean(metric_list, axis=0)[0] 62 | mean_hd95 = np.mean(metric_list, axis=0)[1] 63 | logging.info('Testing performance in best val model: mean_dice : %f mean_hd95 : %f' % (performance, mean_hd95)) 64 | return "Testing Finished!" 65 | 66 | 67 | if __name__ == "__main__": 68 | 69 | if not args.deterministic: 70 | cudnn.benchmark = True 71 | cudnn.deterministic = False 72 | else: 73 | cudnn.benchmark = False 74 | cudnn.deterministic = True 75 | random.seed(args.seed) 76 | np.random.seed(args.seed) 77 | torch.manual_seed(args.seed) 78 | torch.cuda.manual_seed(args.seed) 79 | 80 | dataset_config = { 81 | 'Synapse': { 82 | 'Dataset': Synapse_dataset, 83 | 'volume_path': '../data/Synapse/test_vol_h5', 84 | 'list_dir': './lists/lists_Synapse', 85 | 'num_classes': 9, 86 | 'z_spacing': 1, 87 | }, 88 | } 89 | dataset_name = args.dataset 90 | args.num_classes = dataset_config[dataset_name]['num_classes'] 91 | args.volume_path = dataset_config[dataset_name]['volume_path'] 92 | args.Dataset = dataset_config[dataset_name]['Dataset'] 93 | args.list_dir = dataset_config[dataset_name]['list_dir'] 94 | args.z_spacing = dataset_config[dataset_name]['z_spacing'] 95 | args.is_pretrain = True 96 | 97 | # name the same snapshot defined in train script! 
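# Illustrative note: with test.py's default flags this reconstructs, e.g.,
#   ../model/TU_Synapse224/TU_pretrain_ViT-B_16_skip3_bs24_224
# and each non-default flag (vit_patches_size, max_epochs, batch_size, lr,
# seed, ...) appends its own suffix, so test.py must be run with the same
# settings as train.py for the snapshot to be found.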
98 | args.exp = 'TU_' + dataset_name + str(args.img_size) 99 | snapshot_path = "../model/{}/{}".format(args.exp, 'TU') 100 | snapshot_path = snapshot_path + '_pretrain' if args.is_pretrain else snapshot_path 101 | snapshot_path += '_' + args.vit_name 102 | snapshot_path = snapshot_path + '_skip' + str(args.n_skip) 103 | snapshot_path = snapshot_path + '_vitpatch' + str(args.vit_patches_size) if args.vit_patches_size!=16 else snapshot_path 104 | snapshot_path = snapshot_path + '_epo' + str(args.max_epochs) if args.max_epochs != 30 else snapshot_path 105 | if dataset_name == 'ACDC': # using max_epoch instead of iteration to control training duration 106 | snapshot_path = snapshot_path + '_' + str(args.max_iterations)[0:2] + 'k' if args.max_iterations != 30000 else snapshot_path 107 | snapshot_path = snapshot_path+'_bs'+str(args.batch_size) 108 | snapshot_path = snapshot_path + '_lr' + str(args.base_lr) if args.base_lr != 0.01 else snapshot_path 109 | snapshot_path = snapshot_path + '_'+str(args.img_size) 110 | snapshot_path = snapshot_path + '_s'+str(args.seed) if args.seed!=1234 else snapshot_path 111 | 112 | config_vit = CONFIGS_ViT_seg[args.vit_name] 113 | config_vit.n_classes = args.num_classes 114 | config_vit.n_skip = args.n_skip 115 | config_vit.patches.size = (args.vit_patches_size, args.vit_patches_size) 116 | if args.vit_name.find('R50') !=-1: 117 | config_vit.patches.grid = (int(args.img_size/args.vit_patches_size), int(args.img_size/args.vit_patches_size)) 118 | net = ViT_seg(config_vit, img_size=args.img_size, num_classes=config_vit.n_classes).cuda() 119 | 120 | snapshot = os.path.join(snapshot_path, 'best_model.pth') 121 | if not os.path.exists(snapshot): snapshot = snapshot.replace('best_model', 'epoch_'+str(args.max_epochs-1)) 122 | net.load_state_dict(torch.load(snapshot)) 123 | snapshot_name = snapshot_path.split('/')[-1] 124 | 125 | log_folder = './test_log/test_log_' + args.exp 126 | os.makedirs(log_folder, exist_ok=True) 127 | logging.basicConfig(filename=log_folder + '/'+snapshot_name+".txt", level=logging.INFO, format='[%(asctime)s.%(msecs)03d] %(message)s', datefmt='%H:%M:%S') 128 | logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) 129 | logging.info(str(args)) 130 | logging.info(snapshot_name) 131 | 132 | if args.is_savenii: 133 | args.test_save_dir = '../predictions' 134 | test_save_path = os.path.join(args.test_save_dir, args.exp, snapshot_name) 135 | os.makedirs(test_save_path, exist_ok=True) 136 | else: 137 | test_save_path = None 138 | inference(args, net, test_save_path) 139 | 140 | 141 | -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | import os 4 | import random 5 | import numpy as np 6 | import torch 7 | import torch.backends.cudnn as cudnn 8 | from networks.vit_seg_modeling import VisionTransformer as ViT_seg 9 | from networks.vit_seg_modeling import CONFIGS as CONFIGS_ViT_seg 10 | from trainer import trainer_synapse 11 | 12 | parser = argparse.ArgumentParser() 13 | parser.add_argument('--root_path', type=str, 14 | default='../data/Synapse/train_npz', help='root dir for data') 15 | parser.add_argument('--dataset', type=str, 16 | default='Synapse', help='experiment_name') 17 | parser.add_argument('--list_dir', type=str, 18 | default='./lists/lists_Synapse', help='list dir') 19 | parser.add_argument('--num_classes', type=int, 20 | default=9, help='output channel of network') 21 | 
parser.add_argument('--max_iterations', type=int, 22 | default=30000, help='maximum epoch number to train') 23 | parser.add_argument('--max_epochs', type=int, 24 | default=150, help='maximum epoch number to train') 25 | parser.add_argument('--batch_size', type=int, 26 | default=24, help='batch_size per gpu') 27 | parser.add_argument('--n_gpu', type=int, default=1, help='total gpu') 28 | parser.add_argument('--deterministic', type=int, default=1, 29 | help='whether use deterministic training') 30 | parser.add_argument('--base_lr', type=float, default=0.01, 31 | help='segmentation network learning rate') 32 | parser.add_argument('--img_size', type=int, 33 | default=224, help='input patch size of network input') 34 | parser.add_argument('--seed', type=int, 35 | default=1234, help='random seed') 36 | parser.add_argument('--n_skip', type=int, 37 | default=3, help='using number of skip-connect, default is num') 38 | parser.add_argument('--vit_name', type=str, 39 | default='R50-ViT-B_16', help='select one vit model') 40 | parser.add_argument('--vit_patches_size', type=int, 41 | default=16, help='vit_patches_size, default is 16') 42 | args = parser.parse_args() 43 | 44 | 45 | if __name__ == "__main__": 46 | if not args.deterministic: 47 | cudnn.benchmark = True 48 | cudnn.deterministic = False 49 | else: 50 | cudnn.benchmark = False 51 | cudnn.deterministic = True 52 | 53 | random.seed(args.seed) 54 | np.random.seed(args.seed) 55 | torch.manual_seed(args.seed) 56 | torch.cuda.manual_seed(args.seed) 57 | dataset_name = args.dataset 58 | dataset_config = { 59 | 'Synapse': { 60 | 'root_path': '../data/Synapse/train_npz', 61 | 'list_dir': './lists/lists_Synapse', 62 | 'num_classes': 9, 63 | }, 64 | } 65 | args.num_classes = dataset_config[dataset_name]['num_classes'] 66 | args.root_path = dataset_config[dataset_name]['root_path'] 67 | args.list_dir = dataset_config[dataset_name]['list_dir'] 68 | args.is_pretrain = True 69 | args.exp = 'TU_' + dataset_name + str(args.img_size) 70 | snapshot_path = "../model/{}/{}".format(args.exp, 'TU') 71 | snapshot_path = snapshot_path + '_pretrain' if args.is_pretrain else snapshot_path 72 | snapshot_path += '_' + args.vit_name 73 | snapshot_path = snapshot_path + '_skip' + str(args.n_skip) 74 | snapshot_path = snapshot_path + '_vitpatch' + str(args.vit_patches_size) if args.vit_patches_size!=16 else snapshot_path 75 | snapshot_path = snapshot_path+'_'+str(args.max_iterations)[0:2]+'k' if args.max_iterations != 30000 else snapshot_path 76 | snapshot_path = snapshot_path + '_epo' +str(args.max_epochs) if args.max_epochs != 30 else snapshot_path 77 | snapshot_path = snapshot_path+'_bs'+str(args.batch_size) 78 | snapshot_path = snapshot_path + '_lr' + str(args.base_lr) if args.base_lr != 0.01 else snapshot_path 79 | snapshot_path = snapshot_path + '_'+str(args.img_size) 80 | snapshot_path = snapshot_path + '_s'+str(args.seed) if args.seed!=1234 else snapshot_path 81 | 82 | if not os.path.exists(snapshot_path): 83 | os.makedirs(snapshot_path) 84 | config_vit = CONFIGS_ViT_seg[args.vit_name] 85 | config_vit.n_classes = args.num_classes 86 | config_vit.n_skip = args.n_skip 87 | if args.vit_name.find('R50') != -1: 88 | config_vit.patches.grid = (int(args.img_size / args.vit_patches_size), int(args.img_size / args.vit_patches_size)) 89 | net = ViT_seg(config_vit, img_size=args.img_size, num_classes=config_vit.n_classes).cuda() 90 | net.load_from(weights=np.load(config_vit.pretrained_path)) 91 | 92 | trainer = {'Synapse': trainer_synapse,} 93 | trainer[dataset_name](args, 
--------------------------------------------------------------------------------
/trainer.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import logging
3 | import os
4 | import random
5 | import sys
6 | import time
7 | import numpy as np
8 | import torch
9 | import torch.nn as nn
10 | import torch.optim as optim
11 | from tensorboardX import SummaryWriter
12 | from torch.nn.modules.loss import CrossEntropyLoss
13 | from torch.utils.data import DataLoader
14 | from tqdm import tqdm
15 | from utils import DiceLoss
16 | from torchvision import transforms
17 | 
18 | def trainer_synapse(args, model, snapshot_path):
19 |     from datasets.dataset_synapse import Synapse_dataset, RandomGenerator
20 |     logging.basicConfig(filename=snapshot_path + "/log.txt", level=logging.INFO,
21 |                         format='[%(asctime)s.%(msecs)03d] %(message)s', datefmt='%H:%M:%S')
22 |     logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
23 |     logging.info(str(args))
24 |     base_lr = args.base_lr
25 |     num_classes = args.num_classes
26 |     batch_size = args.batch_size * args.n_gpu
27 |     # max_iterations = args.max_iterations
28 |     db_train = Synapse_dataset(base_dir=args.root_path, list_dir=args.list_dir, split="train",
29 |                                transform=transforms.Compose(
30 |                                    [RandomGenerator(output_size=[args.img_size, args.img_size])]))
31 |     print("The length of train set is: {}".format(len(db_train)))
32 | 
33 |     def worker_init_fn(worker_id):
34 |         random.seed(args.seed + worker_id)
35 | 
36 |     trainloader = DataLoader(db_train, batch_size=batch_size, shuffle=True, num_workers=8, pin_memory=True,
37 |                              worker_init_fn=worker_init_fn)
38 |     if args.n_gpu > 1:
39 |         model = nn.DataParallel(model)
40 |     model.train()
41 |     ce_loss = CrossEntropyLoss()
42 |     dice_loss = DiceLoss(num_classes)
43 |     optimizer = optim.SGD(model.parameters(), lr=base_lr, momentum=0.9, weight_decay=0.0001)
44 |     writer = SummaryWriter(snapshot_path + '/log')
45 |     iter_num = 0
46 |     max_epoch = args.max_epochs
47 |     max_iterations = args.max_epochs * len(trainloader)  # max_epoch = max_iterations // len(trainloader) + 1
48 |     logging.info("{} iterations per epoch. {} max iterations ".format(len(trainloader), max_iterations))
49 |     best_performance = 0.0
50 |     iterator = tqdm(range(max_epoch), ncols=70)
51 |     for epoch_num in iterator:
52 |         for i_batch, sampled_batch in enumerate(trainloader):
53 |             image_batch, label_batch = sampled_batch['image'], sampled_batch['label']
54 |             image_batch, label_batch = image_batch.cuda(), label_batch.cuda()
55 |             outputs = model(image_batch)
56 |             loss_ce = ce_loss(outputs, label_batch[:].long())
57 |             loss_dice = dice_loss(outputs, label_batch, softmax=True)
58 |             loss = 0.5 * loss_ce + 0.5 * loss_dice
59 |             optimizer.zero_grad()
60 |             loss.backward()
61 |             optimizer.step()
62 |             lr_ = base_lr * (1.0 - iter_num / max_iterations) ** 0.9
63 |             for param_group in optimizer.param_groups:
64 |                 param_group['lr'] = lr_
65 | 
66 |             iter_num = iter_num + 1
67 |             writer.add_scalar('info/lr', lr_, iter_num)
68 |             writer.add_scalar('info/total_loss', loss, iter_num)
69 |             writer.add_scalar('info/loss_ce', loss_ce, iter_num)
70 | 
71 |             logging.info('iteration %d : loss : %f, loss_ce: %f' % (iter_num, loss.item(), loss_ce.item()))
72 | 
73 |             if iter_num % 20 == 0:
74 |                 image = image_batch[1, 0:1, :, :]
75 |                 image = (image - image.min()) / (image.max() - image.min())
76 |                 writer.add_image('train/Image', image, iter_num)
77 |                 outputs = torch.argmax(torch.softmax(outputs, dim=1), dim=1, keepdim=True)
78 |                 writer.add_image('train/Prediction', outputs[1, ...] * 50, iter_num)
79 |                 labs = label_batch[1, ...].unsqueeze(0) * 50
80 |                 writer.add_image('train/GroundTruth', labs, iter_num)
81 | 
82 |         save_interval = 50  # int(max_epoch/6)
83 |         if epoch_num > int(max_epoch / 2) and (epoch_num + 1) % save_interval == 0:
84 |             save_mode_path = os.path.join(snapshot_path, 'epoch_' + str(epoch_num) + '.pth')
85 |             torch.save(model.state_dict(), save_mode_path)
86 |             logging.info("save model to {}".format(save_mode_path))
87 | 
88 |         if epoch_num >= max_epoch - 1:
89 |             save_mode_path = os.path.join(snapshot_path, 'epoch_' + str(epoch_num) + '.pth')
90 |             torch.save(model.state_dict(), save_mode_path)
91 |             logging.info("save model to {}".format(save_mode_path))
92 |             iterator.close()
93 |             break
94 | 
95 |     writer.close()
96 |     return "Training Finished!"
--------------------------------------------------------------------------------
/utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import torch
3 | from medpy import metric
4 | from scipy.ndimage import zoom
5 | import torch.nn as nn
6 | import SimpleITK as sitk
7 | 
8 | 
9 | class DiceLoss(nn.Module):
10 |     def __init__(self, n_classes):
11 |         super(DiceLoss, self).__init__()
12 |         self.n_classes = n_classes
13 | 
14 |     def _one_hot_encoder(self, input_tensor):
15 |         tensor_list = []
16 |         for i in range(self.n_classes):
17 |             temp_prob = input_tensor == i  # * torch.ones_like(input_tensor)
18 |             tensor_list.append(temp_prob.unsqueeze(1))
19 |         output_tensor = torch.cat(tensor_list, dim=1)
20 |         return output_tensor.float()
21 | 
22 |     def _dice_loss(self, score, target):
23 |         target = target.float()
24 |         smooth = 1e-5
25 |         intersect = torch.sum(score * target)
26 |         y_sum = torch.sum(target * target)
27 |         z_sum = torch.sum(score * score)
28 |         loss = (2 * intersect + smooth) / (z_sum + y_sum + smooth)
29 |         loss = 1 - loss
30 |         return loss
31 | 
32 |     def forward(self, inputs, target, weight=None, softmax=False):
33 |         if softmax:
34 |             inputs = torch.softmax(inputs, dim=1)
35 |         target = self._one_hot_encoder(target)
36 |         if weight is None:
37 |             weight = [1] * self.n_classes
38 |         assert inputs.size() == target.size(), 'predict {} & target {} shape do not match'.format(inputs.size(), target.size())
39 |         class_wise_dice = []
40 |         loss = 0.0
41 |         for i in range(0, self.n_classes):
42 |             dice = self._dice_loss(inputs[:, i], target[:, i])
43 |             class_wise_dice.append(1.0 - dice.item())
44 |             loss += dice * weight[i]
45 |         return loss / self.n_classes
46 | 
47 | 
48 | def calculate_metric_percase(pred, gt):
49 |     pred[pred > 0] = 1
50 |     gt[gt > 0] = 1
51 |     if pred.sum() > 0 and gt.sum() > 0:
52 |         dice = metric.binary.dc(pred, gt)
53 |         hd95 = metric.binary.hd95(pred, gt)
54 |         return dice, hd95
55 |     elif pred.sum() > 0 and gt.sum() == 0:
56 |         return 1, 0
57 |     else:
58 |         return 0, 0
59 | 
60 | 
61 | def test_single_volume(image, label, net, classes, patch_size=[256, 256], test_save_path=None, case=None, z_spacing=1):
62 |     image, label = image.squeeze(0).cpu().detach().numpy(), label.squeeze(0).cpu().detach().numpy()
63 |     if len(image.shape) == 3:
64 |         prediction = np.zeros_like(label)
65 |         for ind in range(image.shape[0]):
66 |             slice = image[ind, :, :]
67 |             x, y = slice.shape[0], slice.shape[1]
68 |             if x != patch_size[0] or y != patch_size[1]:
69 |                 slice = zoom(slice, (patch_size[0] / x, patch_size[1] / y), order=3)  # cubic interpolation (previously order=0, nearest-neighbour)
70 |             input = torch.from_numpy(slice).unsqueeze(0).unsqueeze(0).float().cuda()
71 |             net.eval()
72 |             with torch.no_grad():
73 |                 outputs = net(input)
74 |                 out = torch.argmax(torch.softmax(outputs, dim=1), dim=1).squeeze(0)
75 |                 out = out.cpu().detach().numpy()
76 |                 if x != patch_size[0] or y != patch_size[1]:
77 |                     pred = zoom(out, (x / patch_size[0], y / patch_size[1]), order=0)
78 |                 else:
79 |                     pred = out
80 |                 prediction[ind] = pred
81 |     else:
82 |         input = torch.from_numpy(image).unsqueeze(
83 |             0).unsqueeze(0).float().cuda()
84 |         net.eval()
85 |         with torch.no_grad():
86 |             out = torch.argmax(torch.softmax(net(input), dim=1), dim=1).squeeze(0)
87 |             prediction = out.cpu().detach().numpy()
88 |     metric_list = []
89 |     for i in range(1, classes):
90 |         metric_list.append(calculate_metric_percase(prediction == i, label == i))
91 | 
92 |     if test_save_path is not None:
93 |         img_itk = sitk.GetImageFromArray(image.astype(np.float32))
94 |         prd_itk = sitk.GetImageFromArray(prediction.astype(np.float32))
95 |         lab_itk = sitk.GetImageFromArray(label.astype(np.float32))
96 |         img_itk.SetSpacing((1, 1, z_spacing))
97 |         prd_itk.SetSpacing((1, 1, z_spacing))
98 |         lab_itk.SetSpacing((1, 1, z_spacing))
99 |         sitk.WriteImage(prd_itk, test_save_path + '/' + case + "_pred.nii.gz")
100 |         sitk.WriteImage(img_itk, test_save_path + '/' + case + "_img.nii.gz")
101 |         sitk.WriteImage(lab_itk, test_save_path + '/' + case + "_gt.nii.gz")
102 |     return metric_list
--------------------------------------------------------------------------------
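A hedged usage sketch of the DiceLoss defined above, mirroring the equal 0.5/0.5 weighting of cross-entropy and Dice used in trainer_synapse. The tensor shapes (batch of 2, 9 classes, 224x224) are illustrative assumptions, and the import assumes the sketch is run from the repository root so that utils.py is importable.

import torch
from torch.nn.modules.loss import CrossEntropyLoss
from utils import DiceLoss

num_classes = 9                                        # Synapse default from train.py
logits = torch.randn(2, num_classes, 224, 224)         # stand-in for raw network outputs
labels = torch.randint(0, num_classes, (2, 224, 224))  # stand-in integer label map

ce_loss = CrossEntropyLoss()
dice_loss = DiceLoss(num_classes)
loss_ce = ce_loss(logits, labels.long())
loss_dice = dice_loss(logits, labels, softmax=True)    # softmax=True normalises the logits internally
loss = 0.5 * loss_ce + 0.5 * loss_dice                 # same weighting as trainer_synapse
print(loss.item())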