├── LICENSE ├── README.md ├── data ├── mimic2 │ └── ALL_CODES.txt └── mimic3 │ ├── ALL_CODES.txt │ └── ALL_CODES_50.txt ├── requirements.txt └── src ├── evaluation.py ├── modeling_bert.py ├── modeling_longformer.py ├── modeling_roberta.py └── run_icd.py /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PLM-ICD: Automatic ICD Coding with Pretrained Language Models 2 | - [ClinicalNLP 2022 Paper](https://aclanthology.org/2022.clinicalnlp-1.2/) 3 | 4 | ![image](https://github.com/MiuLab/PLM-ICD/assets/2268109/dae49ada-c135-4679-90d2-a00f14884aa8) 5 | 6 | ## Reference 7 | Please cite the following paper: 8 | ``` 9 | @inproceedings{huang-etal-2022-plm, 10 | title = "{PLM}-{ICD}: Automatic {ICD} Coding with Pretrained Language Models", 11 | author = "Huang, Chao-Wei and Tsai, Shang-Chi and Chen, Yun-Nung", 12 | booktitle = "Proceedings of the 4th Clinical Natural Language Processing Workshop", 13 | month = jul, 14 | year = "2022", 15 | address = "Seattle, WA", 16 | publisher = "Association for Computational Linguistics", 17 | url = "https://aclanthology.org/2022.clinicalnlp-1.2", 18 | pages = "10--20", 19 | } 20 | ``` 21 | 22 | 23 | ## Requirements 24 | * Python >= 3.6 25 | * Install the required Python packages with `pip3 install -r requirements.txt` 26 | * If the specified versions cannot be found in your distribution, you can simply remove the version constraints. Our code should work with most versions. 27 | 28 | ## Dataset 29 | Unfortunately, we are not allowed to redistribute the MIMIC dataset. 30 | Please follow the instructions from [caml-mimic](https://github.com/jamesmullenbach/caml-mimic) to preprocess the MIMIC-2 and MIMIC-3 datasets and place the files under `data/mimic2` and `data/mimic3` respectively. 31 | 32 | ## How to run 33 | ### Pretrained LMs 34 | Please download the pretrained LMs you want to use from the following links: 35 | - [BioLM](https://github.com/facebookresearch/bio-lm): RoBERTa-PM models 36 | - [BioBERT](https://github.com/dmis-lab/biobert) 37 | - [PubMedBERT](https://huggingface.co/microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract): you can also set `--model_name_or_path microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract` when training the model; the script will download the checkpoint automatically. 38 | 39 | ### Trained Models 40 | You can also download our [trained models](https://drive.google.com/drive/folders/1oJLgLKu_NZxsSTXU9uFVehxXXJYzTalO?usp=sharing) to skip the training step. We provide three trained models: 41 | - [Trained on MIMIC-3 full](https://drive.google.com/drive/folders/1SXlyh4ydRqlLwed_tiBA2mNCDjVll6gD?usp=sharing) 42 | - [Trained on MIMIC-3 50](https://drive.google.com/drive/folders/12xRNiaXbwmrAcqzkUo96EpopBuICnWqR?usp=sharing) 43 | - [Trained on MIMIC-2](https://drive.google.com/drive/folders/1tmopSwLccrBpHCoalAz-oRKAlxBvyF0H?usp=sharing) 44 | 45 | ### Training 46 | 1. `cd src` 47 | 2. Run the following command to train a model on MIMIC-3 full. 48 | ``` 49 | python3 run_icd.py \ 50 | --train_file ../data/mimic3/train_full.csv \ 51 | --validation_file ../data/mimic3/dev_full.csv \ 52 | --max_length 3072 \ 53 | --chunk_size 128 \ 54 | --model_name_or_path ../models/RoBERTa-base-PM-M3-Voc-distill-align-hf \ 55 | --per_device_train_batch_size 1 \ 56 | --gradient_accumulation_steps 8 \ 57 | --per_device_eval_batch_size 1 \ 58 | --num_train_epochs 20 \ 59 | --num_warmup_steps 2000 \ 60 | --output_dir ../models/roberta-mimic3-full \ 61 | --model_type roberta \ 62 | --model_mode laat 63 | ``` 64 | 65 | ### Notes 66 | - If you would like to train BERT-based or Longformer-based models, please set `--model_type [bert|longformer]`.
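  For example, a minimal sketch of the Longformer variant of the training command above (the checkpoint path `../models/clinical-longformer` and the output directory are assumptions; point `--model_name_or_path` at whichever Longformer checkpoint you downloaded):
  ```
  # Same data and hyperparameters as the RoBERTa command above; only the
  # checkpoint path, output directory, and --model_type change.
  python3 run_icd.py \
      --train_file ../data/mimic3/train_full.csv \
      --validation_file ../data/mimic3/dev_full.csv \
      --max_length 3072 \
      --chunk_size 128 \
      --model_name_or_path ../models/clinical-longformer \
      --per_device_train_batch_size 1 \
      --gradient_accumulation_steps 8 \
      --per_device_eval_batch_size 1 \
      --num_train_epochs 20 \
      --num_warmup_steps 2000 \
      --output_dir ../models/longformer-mimic3-full \
      --model_type longformer \
      --model_mode laat
  ```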
67 | - If you would like to train models on MIMIC-3 top-50, please set `--code_50 --code_file ../data/mimic3/ALL_CODES_50.txt` 68 | - If you would like to train models on MIMIC-2, please set `--code_file ../data/mimic2/ALL_CODES.txt` 69 | 70 | ### Inference 71 | 1. `cd src` 72 | 2. Run the following command to evaluate a model on the test set of MIMIC-3 full. 73 | ``` 74 | python3 run_icd.py \ 75 | --train_file ../data/mimic3/train_full.csv \ 76 | --validation_file ../data/mimic3/test_full.csv \ 77 | --max_length 3072 \ 78 | --chunk_size 128 \ 79 | --model_name_or_path ../models/roberta-mimic3-full \ 80 | --per_device_eval_batch_size 1 \ 81 | --num_train_epochs 0 \ 82 | --output_dir ../models/roberta-mimic3-full \ 83 | --model_type roberta \ 84 | --model_mode laat 85 | ``` 86 | -------------------------------------------------------------------------------- /data/mimic2/ALL_CODES.txt: -------------------------------------------------------------------------------- 1 | 004.1 2 | 004.8 3 | 004.9 4 | 005.1 5 | 005.81 6 | 005.9 7 | 007.4 8 | 008.45 9 | 008.5 10 | 008.61 11 | 008.62 12 | 008.63 13 | 008.69 14 | 008.8 15 | 009.0 16 | 009.1 17 | 009.2 18 | 009.3 19 | 011.36 20 | 011.64 21 | 011.90 22 | 011.93 23 | 011.94 24 | 012.05 25 | 012.15 26 | 013.00 27 | 013.04 28 | 013.25 29 | 013.54 30 | 015.04 31 | 018.03 32 | 018.05 33 | 018.94 34 | 021.8 35 | 023.9 36 | 027.0 37 | 027.2 38 | 031.1 39 | 031.2 40 | 031.9 41 | 032.85 42 | 033.8 43 | 034.0 44 | 035 45 | 036.0 46 | 036.2 47 | 036.41 48 | 038.0 49 | 038.10 50 | 038.11 51 | 038.19 52 | 038.2 53 | 038.3 54 | 038.40 55 | 038.41 56 | 038.42 57 | 038.43 58 | 038.44 59 | 038.49 60 | 038.8 61 | 038.9 62 | 039.1 63 | 039.2 64 | 039.9 65 | 040.0 66 | 040.82 67 | 040.89 68 | 041.00 69 | 041.01 70 | 041.02 71 | 041.03 72 | 041.04 73 | 041.05 74 | 041.09 75 | 041.10 76 | 041.11 77 | 041.19 78 | 041.2 79 | 041.3 80 | 041.4 81 | 041.5 82 | 041.6 83 | 041.7 84 | 041.82 85 | 041.83 86 | 041.84 87 | 041.85 88 | 041.86 89 | 041.89 90 | 041.9 91 | 042 92 | 046.3 93 | 047.8 94 | 047.9 95 | 048 96 | 049.8 97 | 049.9 98 | 052.0 99 | 052.1 100 | 052.7 101 | 052.9 102 | 053.0 103 | 053.12 104 | 053.13 105 | 053.19 106 | 053.20 107 | 053.29 108 | 053.79 109 | 053.9 110 | 054.10 111 | 054.2 112 | 054.3 113 | 054.5 114 | 054.71 115 | 054.72 116 | 054.79 117 | 054.9 118 | 057.9 119 | 062.2 120 | 070.0 121 | 070.1 122 | 070.20 123 | 070.22 124 | 070.30 125 | 070.31 126 | 070.32 127 | 070.33 128 | 070.41 129 | 070.44 130 | 070.51 131 | 070.52 132 | 070.54 133 | 070.59 134 | 070.70 135 | 070.71 136 | 075 137 | 077.8 138 | 077.99 139 | 078.0 140 | 078.10 141 | 078.19 142 | 078.5 143 | 078.89 144 | 079.0 145 | 079.3 146 | 079.4 147 | 079.51 148 | 079.6 149 | 079.89 150 | 079.99 151 | 082.40 152 | 083.9 153 | 084.0 154 | 084.4 155 | 084.6 156 | 085.9 157 | 086.0 158 | 088.81 159 | 088.82 160 | 091.2 161 | 091.81 162 | 094.0 163 | 094.9 164 | 096 165 | 097.0 166 | 098.0 167 | 110.0 168 | 110.1 169 | 110.3 170 | 110.4 171 | 110.5 172 | 110.8 173 | 110.9 174 | 111.0 175 | 111.9 176 | 112.0 177 | 112.1 178 | 112.2 179 | 112.3 180 | 112.4 181 | 112.5 182 | 112.84 183 | 112.89 184 | 112.9 185 | 114.0 186 | 115.99 187 | 117.3 188 | 117.4 189 | 117.5 190 | 117.7 191 | 117.9 192 | 120.8 193 | 121.1 194 | 123.1 195 | 125.1 196 | 127.0 197 | 127.2 198 | 129 199 | 130.0 200 | 130.7 201 | 130.8 202 | 130.9 203 | 131.02 204 | 133.0 205 | 134.8 206 | 135 207 | 136.3 208 | 136.9 209 | 137.0 210 | 137.3 211 | 138 212 | 139.0 213 | 139.8 214 | 140.0 215 | 140.1 216 | 140.9 217 | 141.0 218 | 
141.4 219 | 141.8 220 | 141.9 221 | 142.0 222 | 142.9 223 | 143.1 224 | 144.0 225 | 144.8 226 | 145.0 227 | 145.3 228 | 145.8 229 | 146.0 230 | 146.7 231 | 146.8 232 | 147.1 233 | 147.8 234 | 147.9 235 | 148.1 236 | 149.0 237 | 150.1 238 | 150.3 239 | 150.4 240 | 150.5 241 | 150.8 242 | 150.9 243 | 151.0 244 | 151.1 245 | 151.2 246 | 151.3 247 | 151.4 248 | 151.5 249 | 151.6 250 | 151.8 251 | 151.9 252 | 152.0 253 | 152.1 254 | 152.2 255 | 152.8 256 | 153.0 257 | 153.1 258 | 153.2 259 | 153.3 260 | 153.4 261 | 153.6 262 | 153.7 263 | 153.8 264 | 153.9 265 | 154.0 266 | 154.1 267 | 154.2 268 | 154.3 269 | 154.8 270 | 155.0 271 | 155.1 272 | 155.2 273 | 156.0 274 | 156.1 275 | 156.2 276 | 156.8 277 | 156.9 278 | 157.0 279 | 157.1 280 | 157.2 281 | 157.3 282 | 157.4 283 | 157.8 284 | 157.9 285 | 158.0 286 | 158.8 287 | 158.9 288 | 159.8 289 | 159.9 290 | 160.2 291 | 160.3 292 | 160.8 293 | 160.9 294 | 161.0 295 | 161.1 296 | 161.2 297 | 161.3 298 | 161.8 299 | 161.9 300 | 162.2 301 | 162.3 302 | 162.4 303 | 162.5 304 | 162.8 305 | 162.9 306 | 163.8 307 | 163.9 308 | 164.0 309 | 164.1 310 | 164.2 311 | 164.8 312 | 164.9 313 | 170.0 314 | 170.2 315 | 170.7 316 | 171.2 317 | 171.3 318 | 171.4 319 | 171.5 320 | 171.6 321 | 171.8 322 | 172.0 323 | 172.1 324 | 172.3 325 | 172.4 326 | 172.5 327 | 172.7 328 | 172.8 329 | 172.9 330 | 173.2 331 | 173.3 332 | 173.4 333 | 173.5 334 | 173.6 335 | 173.7 336 | 174.3 337 | 174.4 338 | 174.8 339 | 174.9 340 | 175.9 341 | 176.1 342 | 176.3 343 | 176.4 344 | 176.9 345 | 180.0 346 | 180.8 347 | 180.9 348 | 182.0 349 | 183.0 350 | 184.0 351 | 184.4 352 | 184.8 353 | 185 354 | 186.9 355 | 187.4 356 | 188.0 357 | 188.2 358 | 188.3 359 | 188.4 360 | 188.5 361 | 188.8 362 | 188.9 363 | 189.0 364 | 189.1 365 | 189.2 366 | 191.0 367 | 191.1 368 | 191.2 369 | 191.3 370 | 191.4 371 | 191.5 372 | 191.7 373 | 191.8 374 | 191.9 375 | 192.0 376 | 192.1 377 | 192.2 378 | 193 379 | 194.0 380 | 194.1 381 | 195.0 382 | 195.2 383 | 195.3 384 | 196.0 385 | 196.1 386 | 196.2 387 | 196.3 388 | 196.5 389 | 196.6 390 | 196.8 391 | 196.9 392 | 197.0 393 | 197.1 394 | 197.2 395 | 197.3 396 | 197.4 397 | 197.5 398 | 197.6 399 | 197.7 400 | 197.8 401 | 198.0 402 | 198.1 403 | 198.2 404 | 198.3 405 | 198.4 406 | 198.5 407 | 198.6 408 | 198.7 409 | 198.81 410 | 198.82 411 | 198.89 412 | 199.0 413 | 199.1 414 | 200.00 415 | 200.01 416 | 200.02 417 | 200.03 418 | 200.05 419 | 200.08 420 | 200.10 421 | 200.11 422 | 200.20 423 | 200.21 424 | 200.22 425 | 200.23 426 | 200.28 427 | 200.42 428 | 200.71 429 | 200.80 430 | 200.88 431 | 201.50 432 | 201.58 433 | 201.90 434 | 201.92 435 | 201.98 436 | 202.00 437 | 202.01 438 | 202.10 439 | 202.12 440 | 202.13 441 | 202.18 442 | 202.20 443 | 202.40 444 | 202.43 445 | 202.60 446 | 202.70 447 | 202.80 448 | 202.81 449 | 202.82 450 | 202.83 451 | 202.85 452 | 202.87 453 | 202.88 454 | 202.90 455 | 203.00 456 | 203.01 457 | 203.10 458 | 203.80 459 | 204.00 460 | 204.01 461 | 204.10 462 | 204.11 463 | 204.90 464 | 205.00 465 | 205.01 466 | 205.10 467 | 205.11 468 | 205.30 469 | 205.90 470 | 208.00 471 | 208.90 472 | 208.91 473 | 210.4 474 | 211.0 475 | 211.1 476 | 211.2 477 | 211.3 478 | 211.4 479 | 211.5 480 | 211.6 481 | 211.8 482 | 211.9 483 | 212.1 484 | 212.2 485 | 212.3 486 | 212.6 487 | 212.7 488 | 213.0 489 | 213.2 490 | 214.1 491 | 214.3 492 | 214.8 493 | 214.9 494 | 215.4 495 | 215.5 496 | 216.3 497 | 216.5 498 | 216.6 499 | 217 500 | 218.0 501 | 218.1 502 | 218.2 503 | 218.9 504 | 219.1 505 | 220 506 | 221.0 507 | 223.0 508 | 225.0 509 | 225.1 
510 | 225.2 511 | 225.4 512 | 226 513 | 227.0 514 | 227.1 515 | 227.3 516 | 228.01 517 | 228.02 518 | 228.04 519 | 228.09 520 | 228.1 521 | 229.8 522 | 230.0 523 | 230.1 524 | 230.2 525 | 230.9 526 | 231.2 527 | 232.3 528 | 232.9 529 | 233.0 530 | 233.1 531 | 233.3 532 | 233.4 533 | 233.7 534 | 233.9 535 | 235.2 536 | 235.3 537 | 235.4 538 | 235.5 539 | 235.6 540 | 235.7 541 | 236.0 542 | 236.2 543 | 236.91 544 | 237.0 545 | 237.3 546 | 237.5 547 | 237.6 548 | 237.70 549 | 237.71 550 | 238.0 551 | 238.1 552 | 238.2 553 | 238.4 554 | 238.6 555 | 238.7 556 | 238.71 557 | 238.75 558 | 238.76 559 | 238.79 560 | 239.0 561 | 239.1 562 | 239.2 563 | 239.4 564 | 239.5 565 | 239.6 566 | 239.7 567 | 240.9 568 | 241.0 569 | 241.1 570 | 241.9 571 | 242.00 572 | 242.01 573 | 242.20 574 | 242.30 575 | 242.80 576 | 242.81 577 | 242.90 578 | 242.91 579 | 244.0 580 | 244.1 581 | 244.2 582 | 244.3 583 | 244.8 584 | 244.9 585 | 245.2 586 | 245.4 587 | 245.9 588 | 246.2 589 | 246.9 590 | 250.00 591 | 250.01 592 | 250.02 593 | 250.03 594 | 250.10 595 | 250.11 596 | 250.12 597 | 250.13 598 | 250.20 599 | 250.21 600 | 250.22 601 | 250.23 602 | 250.30 603 | 250.31 604 | 250.32 605 | 250.33 606 | 250.40 607 | 250.41 608 | 250.42 609 | 250.43 610 | 250.50 611 | 250.51 612 | 250.52 613 | 250.53 614 | 250.60 615 | 250.61 616 | 250.62 617 | 250.63 618 | 250.70 619 | 250.71 620 | 250.72 621 | 250.73 622 | 250.80 623 | 250.81 624 | 250.82 625 | 250.83 626 | 250.90 627 | 250.91 628 | 250.92 629 | 250.93 630 | 251.1 631 | 251.2 632 | 251.3 633 | 251.5 634 | 251.8 635 | 252.0 636 | 252.00 637 | 252.01 638 | 252.08 639 | 252.1 640 | 253.0 641 | 253.1 642 | 253.2 643 | 253.4 644 | 253.5 645 | 253.6 646 | 253.7 647 | 253.8 648 | 253.9 649 | 254.0 650 | 254.8 651 | 255.0 652 | 255.10 653 | 255.2 654 | 255.3 655 | 255.4 656 | 255.41 657 | 255.5 658 | 255.8 659 | 255.9 660 | 256.1 661 | 256.39 662 | 256.4 663 | 257.2 664 | 258.01 665 | 258.1 666 | 258.8 667 | 258.9 668 | 259.4 669 | 259.9 670 | 260 671 | 261 672 | 262 673 | 263.0 674 | 263.1 675 | 263.8 676 | 263.9 677 | 265.1 678 | 266.2 679 | 267 680 | 268.2 681 | 268.9 682 | 269.0 683 | 269.8 684 | 269.9 685 | 270.0 686 | 270.4 687 | 271.0 688 | 271.3 689 | 271.8 690 | 272.0 691 | 272.1 692 | 272.2 693 | 272.4 694 | 272.5 695 | 272.6 696 | 272.8 697 | 272.9 698 | 273.0 699 | 273.1 700 | 273.2 701 | 273.3 702 | 273.4 703 | 273.8 704 | 273.9 705 | 274.0 706 | 274.82 707 | 274.9 708 | 275.0 709 | 275.1 710 | 275.2 711 | 275.3 712 | 275.40 713 | 275.41 714 | 275.42 715 | 275.49 716 | 275.8 717 | 276.0 718 | 276.1 719 | 276.2 720 | 276.3 721 | 276.4 722 | 276.5 723 | 276.50 724 | 276.51 725 | 276.52 726 | 276.6 727 | 276.7 728 | 276.8 729 | 276.9 730 | 277.00 731 | 277.1 732 | 277.3 733 | 277.30 734 | 277.39 735 | 277.4 736 | 277.6 737 | 277.7 738 | 277.8 739 | 277.87 740 | 277.89 741 | 277.9 742 | 278.00 743 | 278.01 744 | 278.1 745 | 278.8 746 | 279.00 747 | 279.01 748 | 279.06 749 | 279.4 750 | 279.9 751 | 280.0 752 | 280.8 753 | 280.9 754 | 281.0 755 | 281.1 756 | 281.2 757 | 281.8 758 | 281.9 759 | 282.1 760 | 282.2 761 | 282.4 762 | 282.49 763 | 282.5 764 | 282.60 765 | 282.62 766 | 282.9 767 | 283.0 768 | 283.11 769 | 283.19 770 | 283.2 771 | 283.9 772 | 284.1 773 | 284.8 774 | 284.89 775 | 284.9 776 | 285.1 777 | 285.21 778 | 285.22 779 | 285.29 780 | 285.8 781 | 285.9 782 | 286.0 783 | 286.1 784 | 286.2 785 | 286.3 786 | 286.4 787 | 286.5 788 | 286.6 789 | 286.7 790 | 286.9 791 | 287.0 792 | 287.1 793 | 287.2 794 | 287.3 795 | 287.30 796 | 287.31 797 | 287.4 798 | 287.5 
799 | 287.9 800 | 288.0 801 | 288.00 802 | 288.02 803 | 288.03 804 | 288.04 805 | 288.09 806 | 288.3 807 | 288.4 808 | 288.50 809 | 288.60 810 | 288.61 811 | 288.62 812 | 288.64 813 | 288.66 814 | 288.8 815 | 288.9 816 | 289.0 817 | 289.3 818 | 289.4 819 | 289.50 820 | 289.51 821 | 289.52 822 | 289.59 823 | 289.7 824 | 289.8 825 | 289.81 826 | 289.82 827 | 289.83 828 | 289.89 829 | 289.9 830 | 290.0 831 | 290.11 832 | 290.12 833 | 290.3 834 | 290.40 835 | 290.41 836 | 290.43 837 | 291.0 838 | 291.1 839 | 291.2 840 | 291.3 841 | 291.81 842 | 292.0 843 | 292.11 844 | 292.12 845 | 292.81 846 | 292.84 847 | 292.85 848 | 292.89 849 | 292.9 850 | 293.0 851 | 293.1 852 | 293.83 853 | 293.84 854 | 293.89 855 | 293.9 856 | 294.0 857 | 294.10 858 | 294.11 859 | 294.8 860 | 294.9 861 | 295.20 862 | 295.30 863 | 295.32 864 | 295.40 865 | 295.60 866 | 295.62 867 | 295.70 868 | 295.72 869 | 295.73 870 | 295.74 871 | 295.80 872 | 295.90 873 | 295.92 874 | 296.00 875 | 296.04 876 | 296.20 877 | 296.22 878 | 296.23 879 | 296.24 880 | 296.30 881 | 296.32 882 | 296.33 883 | 296.34 884 | 296.40 885 | 296.44 886 | 296.50 887 | 296.53 888 | 296.54 889 | 296.60 890 | 296.7 891 | 296.80 892 | 296.81 893 | 296.89 894 | 296.90 895 | 296.99 896 | 297.1 897 | 297.2 898 | 297.9 899 | 298.4 900 | 298.9 901 | 299.00 902 | 299.80 903 | 300.00 904 | 300.01 905 | 300.02 906 | 300.09 907 | 300.11 908 | 300.12 909 | 300.14 910 | 300.15 911 | 300.16 912 | 300.19 913 | 300.21 914 | 300.22 915 | 300.29 916 | 300.3 917 | 300.4 918 | 300.7 919 | 300.9 920 | 301.0 921 | 301.20 922 | 301.22 923 | 301.4 924 | 301.51 925 | 301.7 926 | 301.81 927 | 301.83 928 | 301.9 929 | 302.50 930 | 303.00 931 | 303.01 932 | 303.02 933 | 303.03 934 | 303.90 935 | 303.91 936 | 303.92 937 | 303.93 938 | 304.00 939 | 304.01 940 | 304.02 941 | 304.03 942 | 304.10 943 | 304.11 944 | 304.20 945 | 304.21 946 | 304.22 947 | 304.23 948 | 304.30 949 | 304.31 950 | 304.40 951 | 304.41 952 | 304.60 953 | 304.61 954 | 304.70 955 | 304.71 956 | 304.73 957 | 304.80 958 | 304.90 959 | 304.91 960 | 304.93 961 | 305.00 962 | 305.01 963 | 305.02 964 | 305.03 965 | 305.1 966 | 305.20 967 | 305.21 968 | 305.22 969 | 305.23 970 | 305.30 971 | 305.32 972 | 305.40 973 | 305.41 974 | 305.50 975 | 305.51 976 | 305.52 977 | 305.53 978 | 305.60 979 | 305.61 980 | 305.62 981 | 305.63 982 | 305.70 983 | 305.80 984 | 305.90 985 | 305.91 986 | 305.93 987 | 306.1 988 | 306.2 989 | 307.1 990 | 307.23 991 | 307.42 992 | 307.50 993 | 307.51 994 | 307.81 995 | 307.9 996 | 308.0 997 | 308.2 998 | 308.3 999 | 308.9 1000 | 309.0 1001 | 309.24 1002 | 309.28 1003 | 309.81 1004 | 309.89 1005 | 309.9 1006 | 310.0 1007 | 310.1 1008 | 310.2 1009 | 310.9 1010 | 311 1011 | 312.39 1012 | 312.9 1013 | 314.00 1014 | 314.01 1015 | 315.8 1016 | 315.9 1017 | 317 1018 | 318.0 1019 | 318.1 1020 | 318.2 1021 | 319 1022 | 320.0 1023 | 320.1 1024 | 320.2 1025 | 320.3 1026 | 320.7 1027 | 320.82 1028 | 320.9 1029 | 321.0 1030 | 322.0 1031 | 322.9 1032 | 323.4 1033 | 323.41 1034 | 323.6 1035 | 323.61 1036 | 323.62 1037 | 323.8 1038 | 323.81 1039 | 323.9 1040 | 324.0 1041 | 324.1 1042 | 324.9 1043 | 325 1044 | 326 1045 | 327.21 1046 | 327.23 1047 | 327.24 1048 | 327.26 1049 | 327.27 1050 | 330.8 1051 | 331.0 1052 | 331.19 1053 | 331.3 1054 | 331.4 1055 | 331.5 1056 | 331.82 1057 | 331.83 1058 | 331.89 1059 | 331.9 1060 | 332.0 1061 | 332.1 1062 | 333.0 1063 | 333.1 1064 | 333.2 1065 | 333.4 1066 | 333.5 1067 | 333.6 1068 | 333.7 1069 | 333.72 1070 | 333.82 1071 | 333.85 1072 | 333.91 1073 | 333.92 1074 | 
333.94 1075 | 333.99 1076 | 334.0 1077 | 334.1 1078 | 334.8 1079 | 334.9 1080 | 335.10 1081 | 335.20 1082 | 335.22 1083 | 335.23 1084 | 336.0 1085 | 336.1 1086 | 336.3 1087 | 336.8 1088 | 336.9 1089 | 337.0 1090 | 337.1 1091 | 337.20 1092 | 337.21 1093 | 337.22 1094 | 337.29 1095 | 337.3 1096 | 337.9 1097 | 338.0 1098 | 338.11 1099 | 338.12 1100 | 338.18 1101 | 338.19 1102 | 338.28 1103 | 338.29 1104 | 338.3 1105 | 338.4 1106 | 340 1107 | 341.1 1108 | 341.20 1109 | 341.8 1110 | 341.9 1111 | 342.00 1112 | 342.01 1113 | 342.02 1114 | 342.10 1115 | 342.80 1116 | 342.81 1117 | 342.82 1118 | 342.90 1119 | 342.91 1120 | 342.92 1121 | 343.1 1122 | 343.2 1123 | 343.4 1124 | 343.8 1125 | 343.9 1126 | 344.00 1127 | 344.01 1128 | 344.02 1129 | 344.03 1130 | 344.04 1131 | 344.09 1132 | 344.1 1133 | 344.30 1134 | 344.40 1135 | 344.60 1136 | 344.61 1137 | 344.81 1138 | 344.89 1139 | 344.9 1140 | 345.00 1141 | 345.01 1142 | 345.10 1143 | 345.11 1144 | 345.3 1145 | 345.40 1146 | 345.41 1147 | 345.50 1148 | 345.51 1149 | 345.70 1150 | 345.71 1151 | 345.80 1152 | 345.90 1153 | 345.91 1154 | 346.00 1155 | 346.20 1156 | 346.80 1157 | 346.90 1158 | 347 1159 | 347.00 1160 | 348.0 1161 | 348.1 1162 | 348.2 1163 | 348.3 1164 | 348.30 1165 | 348.31 1166 | 348.39 1167 | 348.4 1168 | 348.5 1169 | 348.8 1170 | 348.9 1171 | 349.0 1172 | 349.1 1173 | 349.2 1174 | 349.81 1175 | 349.82 1176 | 349.89 1177 | 350.1 1178 | 350.2 1179 | 350.9 1180 | 351.0 1181 | 351.8 1182 | 351.9 1183 | 352.2 1184 | 352.6 1185 | 353.0 1186 | 353.6 1187 | 354.0 1188 | 354.1 1189 | 354.2 1190 | 354.3 1191 | 354.5 1192 | 354.8 1193 | 354.9 1194 | 355.1 1195 | 355.2 1196 | 355.3 1197 | 355.5 1198 | 355.6 1199 | 355.71 1200 | 355.79 1201 | 355.8 1202 | 355.9 1203 | 356.1 1204 | 356.2 1205 | 356.8 1206 | 356.9 1207 | 357.0 1208 | 357.2 1209 | 357.3 1210 | 357.4 1211 | 357.5 1212 | 357.6 1213 | 357.7 1214 | 357.8 1215 | 357.81 1216 | 357.82 1217 | 357.89 1218 | 358.0 1219 | 358.00 1220 | 358.01 1221 | 358.1 1222 | 358.8 1223 | 358.9 1224 | 359.1 1225 | 359.2 1226 | 359.21 1227 | 359.4 1228 | 359.81 1229 | 359.89 1230 | 359.9 1231 | 360.00 1232 | 360.01 1233 | 360.12 1234 | 360.19 1235 | 360.43 1236 | 361.01 1237 | 361.06 1238 | 361.89 1239 | 361.9 1240 | 362.01 1241 | 362.02 1242 | 362.03 1243 | 362.07 1244 | 362.10 1245 | 362.11 1246 | 362.17 1247 | 362.30 1248 | 362.31 1249 | 362.34 1250 | 362.50 1251 | 362.74 1252 | 362.81 1253 | 362.84 1254 | 363.20 1255 | 364.03 1256 | 364.3 1257 | 364.41 1258 | 364.9 1259 | 365.00 1260 | 365.10 1261 | 365.20 1262 | 365.22 1263 | 365.63 1264 | 365.65 1265 | 365.9 1266 | 366.16 1267 | 366.41 1268 | 366.8 1269 | 366.9 1270 | 367.1 1271 | 367.4 1272 | 368.11 1273 | 368.12 1274 | 368.13 1275 | 368.16 1276 | 368.2 1277 | 368.40 1278 | 368.41 1279 | 368.46 1280 | 368.8 1281 | 368.9 1282 | 369.00 1283 | 369.01 1284 | 369.3 1285 | 369.4 1286 | 369.60 1287 | 369.70 1288 | 369.8 1289 | 369.9 1290 | 370.00 1291 | 370.34 1292 | 370.9 1293 | 371.40 1294 | 372.00 1295 | 372.03 1296 | 372.30 1297 | 372.39 1298 | 372.72 1299 | 372.73 1300 | 372.75 1301 | 373.00 1302 | 373.11 1303 | 373.13 1304 | 374.10 1305 | 374.20 1306 | 374.30 1307 | 374.31 1308 | 374.43 1309 | 374.82 1310 | 374.9 1311 | 375.00 1312 | 375.15 1313 | 375.56 1314 | 376.01 1315 | 376.11 1316 | 376.12 1317 | 376.30 1318 | 376.32 1319 | 376.33 1320 | 376.35 1321 | 376.89 1322 | 376.9 1323 | 377.00 1324 | 377.01 1325 | 377.02 1326 | 377.16 1327 | 377.30 1328 | 377.39 1329 | 377.41 1330 | 377.49 1331 | 377.75 1332 | 378.00 1333 | 378.10 1334 | 378.51 1335 | 378.52 
1336 | 378.54 1337 | 378.56 1338 | 378.71 1339 | 378.81 1340 | 378.9 1341 | 379.00 1342 | 379.09 1343 | 379.21 1344 | 379.23 1345 | 379.24 1346 | 379.40 1347 | 379.41 1348 | 379.43 1349 | 379.50 1350 | 379.56 1351 | 379.91 1352 | 379.92 1353 | 380.10 1354 | 380.15 1355 | 380.22 1356 | 380.4 1357 | 381.00 1358 | 382.00 1359 | 382.01 1360 | 382.02 1361 | 382.9 1362 | 383.00 1363 | 383.02 1364 | 383.1 1365 | 383.21 1366 | 383.9 1367 | 384.01 1368 | 384.20 1369 | 385.82 1370 | 385.89 1371 | 386.00 1372 | 386.10 1373 | 386.11 1374 | 386.12 1375 | 386.30 1376 | 386.9 1377 | 388.30 1378 | 388.32 1379 | 388.61 1380 | 388.69 1381 | 388.70 1382 | 388.72 1383 | 388.8 1384 | 389.00 1385 | 389.03 1386 | 389.10 1387 | 389.12 1388 | 389.15 1389 | 389.7 1390 | 389.8 1391 | 389.9 1392 | 391.0 1393 | 391.1 1394 | 391.8 1395 | 394.0 1396 | 394.1 1397 | 394.2 1398 | 394.9 1399 | 395.0 1400 | 395.1 1401 | 395.2 1402 | 395.9 1403 | 396.0 1404 | 396.1 1405 | 396.2 1406 | 396.3 1407 | 396.8 1408 | 397.0 1409 | 397.1 1410 | 398.90 1411 | 398.91 1412 | 401.0 1413 | 401.1 1414 | 401.9 1415 | 402.00 1416 | 402.01 1417 | 402.90 1418 | 402.91 1419 | 403.00 1420 | 403.01 1421 | 403.10 1422 | 403.11 1423 | 403.90 1424 | 403.91 1425 | 404.00 1426 | 404.01 1427 | 404.03 1428 | 404.13 1429 | 404.90 1430 | 404.91 1431 | 404.92 1432 | 404.93 1433 | 405.01 1434 | 405.91 1435 | 405.99 1436 | 410.00 1437 | 410.01 1438 | 410.02 1439 | 410.11 1440 | 410.12 1441 | 410.21 1442 | 410.22 1443 | 410.31 1444 | 410.41 1445 | 410.42 1446 | 410.51 1447 | 410.61 1448 | 410.71 1449 | 410.72 1450 | 410.81 1451 | 410.82 1452 | 410.90 1453 | 410.91 1454 | 410.92 1455 | 411.0 1456 | 411.1 1457 | 411.81 1458 | 411.89 1459 | 412 1460 | 413.1 1461 | 413.9 1462 | 414.00 1463 | 414.01 1464 | 414.02 1465 | 414.03 1466 | 414.04 1467 | 414.10 1468 | 414.11 1469 | 414.12 1470 | 414.19 1471 | 414.2 1472 | 414.8 1473 | 414.9 1474 | 415.0 1475 | 415.11 1476 | 415.12 1477 | 415.19 1478 | 416.0 1479 | 416.8 1480 | 416.9 1481 | 417.1 1482 | 417.8 1483 | 420.0 1484 | 420.90 1485 | 420.91 1486 | 420.99 1487 | 421.0 1488 | 421.9 1489 | 422.0 1490 | 422.90 1491 | 422.91 1492 | 422.92 1493 | 422.93 1494 | 423.0 1495 | 423.1 1496 | 423.2 1497 | 423.3 1498 | 423.8 1499 | 423.9 1500 | 424.0 1501 | 424.1 1502 | 424.2 1503 | 424.3 1504 | 424.90 1505 | 425.1 1506 | 425.3 1507 | 425.4 1508 | 425.5 1509 | 425.7 1510 | 425.8 1511 | 425.9 1512 | 426.0 1513 | 426.10 1514 | 426.11 1515 | 426.12 1516 | 426.13 1517 | 426.2 1518 | 426.3 1519 | 426.4 1520 | 426.50 1521 | 426.51 1522 | 426.52 1523 | 426.53 1524 | 426.6 1525 | 426.7 1526 | 426.82 1527 | 426.89 1528 | 426.9 1529 | 427.0 1530 | 427.1 1531 | 427.2 1532 | 427.31 1533 | 427.32 1534 | 427.41 1535 | 427.42 1536 | 427.5 1537 | 427.60 1538 | 427.61 1539 | 427.69 1540 | 427.81 1541 | 427.89 1542 | 427.9 1543 | 428.0 1544 | 428.1 1545 | 428.20 1546 | 428.21 1547 | 428.22 1548 | 428.23 1549 | 428.30 1550 | 428.31 1551 | 428.32 1552 | 428.33 1553 | 428.40 1554 | 428.41 1555 | 428.42 1556 | 428.43 1557 | 428.9 1558 | 429.0 1559 | 429.1 1560 | 429.2 1561 | 429.3 1562 | 429.4 1563 | 429.5 1564 | 429.6 1565 | 429.71 1566 | 429.79 1567 | 429.81 1568 | 429.83 1569 | 429.89 1570 | 429.9 1571 | 430 1572 | 431 1573 | 432.0 1574 | 432.1 1575 | 432.9 1576 | 433.00 1577 | 433.01 1578 | 433.10 1579 | 433.11 1580 | 433.20 1581 | 433.21 1582 | 433.30 1583 | 433.31 1584 | 433.80 1585 | 433.81 1586 | 433.91 1587 | 434.00 1588 | 434.01 1589 | 434.10 1590 | 434.11 1591 | 434.90 1592 | 434.91 1593 | 435.0 1594 | 435.1 1595 | 435.2 1596 | 435.3 1597 
| 435.8 1598 | 435.9 1599 | 436 1600 | 437.0 1601 | 437.1 1602 | 437.2 1603 | 437.3 1604 | 437.4 1605 | 437.5 1606 | 437.6 1607 | 437.7 1608 | 437.8 1609 | 437.9 1610 | 438.0 1611 | 438.10 1612 | 438.11 1613 | 438.12 1614 | 438.19 1615 | 438.20 1616 | 438.21 1617 | 438.22 1618 | 438.30 1619 | 438.31 1620 | 438.40 1621 | 438.50 1622 | 438.52 1623 | 438.53 1624 | 438.6 1625 | 438.7 1626 | 438.82 1627 | 438.83 1628 | 438.84 1629 | 438.85 1630 | 438.89 1631 | 438.9 1632 | 440.0 1633 | 440.1 1634 | 440.20 1635 | 440.21 1636 | 440.22 1637 | 440.23 1638 | 440.24 1639 | 440.29 1640 | 440.30 1641 | 440.31 1642 | 440.32 1643 | 440.4 1644 | 440.8 1645 | 440.9 1646 | 441.00 1647 | 441.01 1648 | 441.02 1649 | 441.03 1650 | 441.1 1651 | 441.2 1652 | 441.3 1653 | 441.4 1654 | 441.6 1655 | 441.7 1656 | 441.9 1657 | 442.0 1658 | 442.1 1659 | 442.2 1660 | 442.3 1661 | 442.81 1662 | 442.82 1663 | 442.83 1664 | 442.84 1665 | 442.89 1666 | 443.0 1667 | 443.21 1668 | 443.22 1669 | 443.23 1670 | 443.24 1671 | 443.29 1672 | 443.81 1673 | 443.89 1674 | 443.9 1675 | 444.0 1676 | 444.1 1677 | 444.21 1678 | 444.22 1679 | 444.81 1680 | 444.89 1681 | 444.9 1682 | 445.02 1683 | 445.81 1684 | 445.89 1685 | 446.29 1686 | 446.4 1687 | 446.5 1688 | 446.6 1689 | 446.7 1690 | 447.0 1691 | 447.1 1692 | 447.2 1693 | 447.3 1694 | 447.4 1695 | 447.5 1696 | 447.6 1697 | 447.8 1698 | 447.9 1699 | 448.0 1700 | 448.9 1701 | 449 1702 | 451.0 1703 | 451.19 1704 | 451.82 1705 | 451.83 1706 | 451.84 1707 | 451.89 1708 | 451.9 1709 | 452 1710 | 453.0 1711 | 453.1 1712 | 453.2 1713 | 453.3 1714 | 453.40 1715 | 453.41 1716 | 453.42 1717 | 453.8 1718 | 453.9 1719 | 454.0 1720 | 454.1 1721 | 454.2 1722 | 454.8 1723 | 454.9 1724 | 455.0 1725 | 455.1 1726 | 455.2 1727 | 455.3 1728 | 455.4 1729 | 455.5 1730 | 455.6 1731 | 455.8 1732 | 456.0 1733 | 456.1 1734 | 456.20 1735 | 456.21 1736 | 456.8 1737 | 457.0 1738 | 457.1 1739 | 457.8 1740 | 458.0 1741 | 458.1 1742 | 458.2 1743 | 458.21 1744 | 458.29 1745 | 458.8 1746 | 458.9 1747 | 459.0 1748 | 459.2 1749 | 459.81 1750 | 459.89 1751 | 459.9 1752 | 461.0 1753 | 461.1 1754 | 461.2 1755 | 461.3 1756 | 461.8 1757 | 461.9 1758 | 462 1759 | 463 1760 | 464.00 1761 | 464.10 1762 | 464.11 1763 | 464.30 1764 | 464.31 1765 | 464.50 1766 | 464.51 1767 | 465.9 1768 | 466.0 1769 | 466.11 1770 | 466.19 1771 | 470 1772 | 471.0 1773 | 471.8 1774 | 472.0 1775 | 473.0 1776 | 473.1 1777 | 473.2 1778 | 473.3 1779 | 473.8 1780 | 473.9 1781 | 474.11 1782 | 474.12 1783 | 474.8 1784 | 475 1785 | 477.0 1786 | 477.9 1787 | 478.0 1788 | 478.1 1789 | 478.19 1790 | 478.20 1791 | 478.21 1792 | 478.22 1793 | 478.24 1794 | 478.25 1795 | 478.29 1796 | 478.30 1797 | 478.31 1798 | 478.32 1799 | 478.33 1800 | 478.34 1801 | 478.4 1802 | 478.5 1803 | 478.6 1804 | 478.70 1805 | 478.74 1806 | 478.75 1807 | 478.79 1808 | 478.9 1809 | 480.1 1810 | 480.8 1811 | 480.9 1812 | 481 1813 | 482.0 1814 | 482.1 1815 | 482.2 1816 | 482.30 1817 | 482.31 1818 | 482.32 1819 | 482.39 1820 | 482.40 1821 | 482.41 1822 | 482.49 1823 | 482.81 1824 | 482.82 1825 | 482.83 1826 | 482.84 1827 | 482.89 1828 | 482.9 1829 | 483.0 1830 | 483.8 1831 | 484.1 1832 | 484.3 1833 | 484.6 1834 | 484.7 1835 | 484.8 1836 | 485 1837 | 486 1838 | 487.0 1839 | 487.1 1840 | 490 1841 | 491.20 1842 | 491.21 1843 | 491.22 1844 | 491.8 1845 | 491.9 1846 | 492.0 1847 | 492.8 1848 | 493.00 1849 | 493.01 1850 | 493.02 1851 | 493.20 1852 | 493.21 1853 | 493.22 1854 | 493.81 1855 | 493.90 1856 | 493.91 1857 | 493.92 1858 | 494.0 1859 | 494.1 1860 | 495.9 1861 | 496 1862 | 500 1863 | 501 
1864 | 502 1865 | 506.0 1866 | 507.0 1867 | 507.1 1868 | 507.8 1869 | 508.0 1870 | 508.1 1871 | 508.8 1872 | 510.0 1873 | 510.9 1874 | 511.0 1875 | 511.1 1876 | 511.8 1877 | 511.9 1878 | 512.0 1879 | 512.1 1880 | 512.8 1881 | 513.0 1882 | 513.1 1883 | 514 1884 | 515 1885 | 516.0 1886 | 516.1 1887 | 516.3 1888 | 516.8 1889 | 516.9 1890 | 517.2 1891 | 517.3 1892 | 517.8 1893 | 518.0 1894 | 518.1 1895 | 518.3 1896 | 518.4 1897 | 518.5 1898 | 518.7 1899 | 518.81 1900 | 518.82 1901 | 518.83 1902 | 518.84 1903 | 518.89 1904 | 519.00 1905 | 519.01 1906 | 519.02 1907 | 519.09 1908 | 519.1 1909 | 519.11 1910 | 519.19 1911 | 519.2 1912 | 519.3 1913 | 519.4 1914 | 519.8 1915 | 520.0 1916 | 520.6 1917 | 521.00 1918 | 521.01 1919 | 521.08 1920 | 521.09 1921 | 521.9 1922 | 522.4 1923 | 522.5 1924 | 522.6 1925 | 523.3 1926 | 523.30 1927 | 523.33 1928 | 523.4 1929 | 523.40 1930 | 523.8 1931 | 523.9 1932 | 524.60 1933 | 524.69 1934 | 525.10 1935 | 525.11 1936 | 525.12 1937 | 525.3 1938 | 525.50 1939 | 525.8 1940 | 525.9 1941 | 526.4 1942 | 526.89 1943 | 526.9 1944 | 527.2 1945 | 527.3 1946 | 527.5 1947 | 527.7 1948 | 528.0 1949 | 528.00 1950 | 528.01 1951 | 528.09 1952 | 528.2 1953 | 528.3 1954 | 528.5 1955 | 528.6 1956 | 528.9 1957 | 529.0 1958 | 529.8 1959 | 530.0 1960 | 530.10 1961 | 530.11 1962 | 530.12 1963 | 530.19 1964 | 530.2 1965 | 530.20 1966 | 530.21 1967 | 530.3 1968 | 530.4 1969 | 530.5 1970 | 530.6 1971 | 530.7 1972 | 530.81 1973 | 530.82 1974 | 530.84 1975 | 530.85 1976 | 530.87 1977 | 530.89 1978 | 530.9 1979 | 531.00 1980 | 531.01 1981 | 531.10 1982 | 531.11 1983 | 531.40 1984 | 531.50 1985 | 531.70 1986 | 531.90 1987 | 532.00 1988 | 532.01 1989 | 532.10 1990 | 532.20 1991 | 532.30 1992 | 532.40 1993 | 532.41 1994 | 532.50 1995 | 532.51 1996 | 532.60 1997 | 532.70 1998 | 532.90 1999 | 533.00 2000 | 533.40 2001 | 533.41 2002 | 533.70 2003 | 533.90 2004 | 534.00 2005 | 534.40 2006 | 534.41 2007 | 534.50 2008 | 534.90 2009 | 535.00 2010 | 535.01 2011 | 535.10 2012 | 535.11 2013 | 535.21 2014 | 535.30 2015 | 535.31 2016 | 535.40 2017 | 535.41 2018 | 535.50 2019 | 535.51 2020 | 535.60 2021 | 535.61 2022 | 536.1 2023 | 536.2 2024 | 536.3 2025 | 536.41 2026 | 536.42 2027 | 536.49 2028 | 536.8 2029 | 537.0 2030 | 537.1 2031 | 537.3 2032 | 537.4 2033 | 537.82 2034 | 537.83 2035 | 537.84 2036 | 537.89 2037 | 537.9 2038 | 540.0 2039 | 540.1 2040 | 540.9 2041 | 541 2042 | 543.9 2043 | 550.10 2044 | 550.12 2045 | 550.90 2046 | 550.92 2047 | 551.20 2048 | 551.29 2049 | 551.3 2050 | 552.00 2051 | 552.1 2052 | 552.20 2053 | 552.21 2054 | 552.29 2055 | 552.3 2056 | 552.8 2057 | 552.9 2058 | 553.00 2059 | 553.1 2060 | 553.20 2061 | 553.21 2062 | 553.29 2063 | 553.3 2064 | 553.8 2065 | 553.9 2066 | 555.0 2067 | 555.1 2068 | 555.2 2069 | 555.9 2070 | 556.0 2071 | 556.1 2072 | 556.3 2073 | 556.4 2074 | 556.6 2075 | 556.8 2076 | 556.9 2077 | 557.0 2078 | 557.1 2079 | 557.9 2080 | 558.1 2081 | 558.2 2082 | 558.9 2083 | 560.0 2084 | 560.1 2085 | 560.2 2086 | 560.30 2087 | 560.31 2088 | 560.39 2089 | 560.81 2090 | 560.89 2091 | 560.9 2092 | 562.00 2093 | 562.01 2094 | 562.02 2095 | 562.10 2096 | 562.11 2097 | 562.12 2098 | 562.13 2099 | 564.0 2100 | 564.00 2101 | 564.01 2102 | 564.09 2103 | 564.1 2104 | 564.2 2105 | 564.7 2106 | 564.81 2107 | 564.89 2108 | 565.0 2109 | 565.1 2110 | 566 2111 | 567.0 2112 | 567.2 2113 | 567.21 2114 | 567.22 2115 | 567.23 2116 | 567.29 2117 | 567.31 2118 | 567.38 2119 | 567.8 2120 | 567.81 2121 | 567.82 2122 | 567.89 2123 | 567.9 2124 | 568.0 2125 | 568.81 2126 | 568.82 2127 | 568.89 
2128 | 569.0 2129 | 569.1 2130 | 569.2 2131 | 569.3 2132 | 569.41 2133 | 569.42 2134 | 569.49 2135 | 569.5 2136 | 569.61 2137 | 569.62 2138 | 569.69 2139 | 569.81 2140 | 569.82 2141 | 569.83 2142 | 569.84 2143 | 569.85 2144 | 569.86 2145 | 569.89 2146 | 569.9 2147 | 570 2148 | 571.0 2149 | 571.1 2150 | 571.2 2151 | 571.3 2152 | 571.40 2153 | 571.49 2154 | 571.5 2155 | 571.6 2156 | 571.8 2157 | 571.9 2158 | 572.0 2159 | 572.1 2160 | 572.2 2161 | 572.3 2162 | 572.4 2163 | 572.8 2164 | 573.0 2165 | 573.1 2166 | 573.3 2167 | 573.4 2168 | 573.8 2169 | 573.9 2170 | 574.00 2171 | 574.01 2172 | 574.10 2173 | 574.11 2174 | 574.20 2175 | 574.21 2176 | 574.30 2177 | 574.31 2178 | 574.40 2179 | 574.41 2180 | 574.50 2181 | 574.51 2182 | 574.60 2183 | 574.61 2184 | 574.70 2185 | 574.71 2186 | 574.80 2187 | 574.81 2188 | 574.90 2189 | 574.91 2190 | 575.0 2191 | 575.10 2192 | 575.11 2193 | 575.12 2194 | 575.4 2195 | 575.5 2196 | 575.6 2197 | 575.8 2198 | 575.9 2199 | 576.0 2200 | 576.1 2201 | 576.2 2202 | 576.3 2203 | 576.4 2204 | 576.8 2205 | 576.9 2206 | 577.0 2207 | 577.1 2208 | 577.2 2209 | 577.8 2210 | 577.9 2211 | 578.0 2212 | 578.1 2213 | 578.9 2214 | 579.0 2215 | 579.3 2216 | 579.8 2217 | 579.9 2218 | 580.0 2219 | 580.4 2220 | 580.81 2221 | 580.89 2222 | 580.9 2223 | 581.1 2224 | 581.2 2225 | 581.81 2226 | 581.89 2227 | 581.9 2228 | 582.1 2229 | 582.2 2230 | 582.81 2231 | 582.89 2232 | 582.9 2233 | 583.0 2234 | 583.2 2235 | 583.81 2236 | 583.89 2237 | 583.9 2238 | 584.5 2239 | 584.6 2240 | 584.7 2241 | 584.8 2242 | 584.9 2243 | 585 2244 | 585.1 2245 | 585.2 2246 | 585.3 2247 | 585.4 2248 | 585.5 2249 | 585.6 2250 | 585.9 2251 | 586 2252 | 587 2253 | 588.0 2254 | 588.1 2255 | 588.8 2256 | 588.81 2257 | 588.89 2258 | 590.00 2259 | 590.10 2260 | 590.11 2261 | 590.2 2262 | 590.80 2263 | 590.9 2264 | 591 2265 | 592.0 2266 | 592.1 2267 | 592.9 2268 | 593.2 2269 | 593.3 2270 | 593.4 2271 | 593.5 2272 | 593.81 2273 | 593.82 2274 | 593.89 2275 | 593.9 2276 | 594.0 2277 | 594.1 2278 | 594.2 2279 | 594.9 2280 | 595.0 2281 | 595.1 2282 | 595.2 2283 | 595.81 2284 | 595.82 2285 | 595.89 2286 | 595.9 2287 | 596.0 2288 | 596.1 2289 | 596.3 2290 | 596.4 2291 | 596.51 2292 | 596.54 2293 | 596.55 2294 | 596.59 2295 | 596.6 2296 | 596.7 2297 | 596.8 2298 | 596.9 2299 | 597.0 2300 | 597.80 2301 | 597.89 2302 | 598.00 2303 | 598.1 2304 | 598.2 2305 | 598.8 2306 | 598.9 2307 | 599.0 2308 | 599.4 2309 | 599.6 2310 | 599.60 2311 | 599.69 2312 | 599.7 2313 | 600.0 2314 | 600.00 2315 | 600.01 2316 | 600.10 2317 | 600.2 2318 | 600.9 2319 | 600.90 2320 | 600.91 2321 | 601.0 2322 | 601.1 2323 | 601.2 2324 | 601.8 2325 | 601.9 2326 | 602.3 2327 | 602.8 2328 | 603.1 2329 | 603.8 2330 | 603.9 2331 | 604.0 2332 | 604.90 2333 | 605 2334 | 607.1 2335 | 607.2 2336 | 607.82 2337 | 607.83 2338 | 607.84 2339 | 607.89 2340 | 607.9 2341 | 608.4 2342 | 608.83 2343 | 608.86 2344 | 608.89 2345 | 608.9 2346 | 610.1 2347 | 611.0 2348 | 611.1 2349 | 611.6 2350 | 611.71 2351 | 611.72 2352 | 611.8 2353 | 614.0 2354 | 614.1 2355 | 614.2 2356 | 614.3 2357 | 614.4 2358 | 614.5 2359 | 614.6 2360 | 614.9 2361 | 615.0 2362 | 615.1 2363 | 615.9 2364 | 616.0 2365 | 616.10 2366 | 616.2 2367 | 616.50 2368 | 616.8 2369 | 617.0 2370 | 617.1 2371 | 617.2 2372 | 617.3 2373 | 617.5 2374 | 617.8 2375 | 617.9 2376 | 618.0 2377 | 618.01 2378 | 618.04 2379 | 618.1 2380 | 618.2 2381 | 618.3 2382 | 618.4 2383 | 618.5 2384 | 618.8 2385 | 619.0 2386 | 619.1 2387 | 619.8 2388 | 620.0 2389 | 620.1 2390 | 620.2 2391 | 620.3 2392 | 620.5 2393 | 620.8 2394 | 620.9 2395 | 
621.0 2396 | 621.3 2397 | 621.30 2398 | 621.4 2399 | 621.8 2400 | 622.1 2401 | 622.10 2402 | 623.5 2403 | 623.8 2404 | 624.8 2405 | 624.9 2406 | 625.3 2407 | 625.5 2408 | 625.6 2409 | 625.8 2410 | 625.9 2411 | 626.2 2412 | 626.4 2413 | 626.6 2414 | 626.8 2415 | 626.9 2416 | 627.0 2417 | 627.1 2418 | 627.3 2419 | 627.8 2420 | 629.89 2421 | 632 2422 | 633.1 2423 | 633.10 2424 | 633.11 2425 | 633.20 2426 | 633.80 2427 | 634.01 2428 | 634.11 2429 | 634.51 2430 | 634.91 2431 | 635.02 2432 | 635.12 2433 | 635.22 2434 | 635.52 2435 | 635.72 2436 | 635.92 2437 | 639.1 2438 | 639.2 2439 | 639.6 2440 | 639.8 2441 | 641.11 2442 | 641.13 2443 | 641.21 2444 | 641.31 2445 | 641.33 2446 | 642.01 2447 | 642.03 2448 | 642.04 2449 | 642.24 2450 | 642.31 2451 | 642.32 2452 | 642.34 2453 | 642.41 2454 | 642.44 2455 | 642.51 2456 | 642.54 2457 | 642.61 2458 | 642.64 2459 | 642.71 2460 | 642.92 2461 | 643.03 2462 | 643.13 2463 | 644.03 2464 | 644.21 2465 | 645.11 2466 | 646.12 2467 | 646.21 2468 | 646.22 2469 | 646.61 2470 | 646.62 2471 | 646.63 2472 | 646.64 2473 | 646.81 2474 | 646.82 2475 | 646.83 2476 | 647.61 2477 | 647.81 2478 | 647.82 2479 | 647.83 2480 | 647.84 2481 | 648.01 2482 | 648.03 2483 | 648.04 2484 | 648.11 2485 | 648.13 2486 | 648.14 2487 | 648.21 2488 | 648.22 2489 | 648.23 2490 | 648.24 2491 | 648.31 2492 | 648.41 2493 | 648.42 2494 | 648.43 2495 | 648.44 2496 | 648.61 2497 | 648.62 2498 | 648.63 2499 | 648.64 2500 | 648.81 2501 | 648.83 2502 | 648.91 2503 | 648.92 2504 | 648.93 2505 | 648.94 2506 | 649.03 2507 | 649.31 2508 | 649.44 2509 | 651.01 2510 | 652.21 2511 | 652.23 2512 | 652.61 2513 | 653.41 2514 | 654.04 2515 | 654.11 2516 | 654.21 2517 | 654.23 2518 | 654.41 2519 | 654.42 2520 | 654.44 2521 | 654.51 2522 | 655.53 2523 | 655.71 2524 | 655.83 2525 | 656.11 2526 | 656.13 2527 | 656.41 2528 | 656.51 2529 | 656.61 2530 | 656.71 2531 | 656.81 2532 | 657.01 2533 | 658.01 2534 | 658.11 2535 | 658.21 2536 | 658.41 2537 | 659.11 2538 | 659.21 2539 | 659.31 2540 | 659.41 2541 | 659.51 2542 | 659.61 2543 | 659.63 2544 | 659.71 2545 | 659.81 2546 | 660.01 2547 | 660.21 2548 | 661.01 2549 | 661.11 2550 | 661.21 2551 | 661.31 2552 | 663.31 2553 | 664.01 2554 | 664.11 2555 | 664.21 2556 | 665.11 2557 | 665.24 2558 | 665.31 2559 | 665.34 2560 | 665.41 2561 | 665.51 2562 | 665.61 2563 | 665.72 2564 | 665.81 2565 | 665.82 2566 | 666.02 2567 | 666.04 2568 | 666.12 2569 | 666.14 2570 | 666.22 2571 | 666.24 2572 | 666.32 2573 | 666.34 2574 | 668.11 2575 | 669.02 2576 | 669.11 2577 | 669.21 2578 | 669.22 2579 | 669.24 2580 | 669.32 2581 | 669.34 2582 | 669.41 2583 | 669.42 2584 | 669.44 2585 | 669.81 2586 | 670.02 2587 | 670.04 2588 | 671.31 2589 | 671.42 2590 | 671.53 2591 | 671.54 2592 | 671.81 2593 | 672.02 2594 | 672.04 2595 | 673.11 2596 | 673.22 2597 | 673.23 2598 | 673.24 2599 | 673.33 2600 | 674.02 2601 | 674.03 2602 | 674.04 2603 | 674.12 2604 | 674.14 2605 | 674.22 2606 | 674.32 2607 | 674.34 2608 | 674.51 2609 | 674.52 2610 | 674.54 2611 | 674.82 2612 | 674.84 2613 | 680.2 2614 | 680.5 2615 | 680.6 2616 | 681.00 2617 | 681.10 2618 | 681.11 2619 | 682.0 2620 | 682.1 2621 | 682.2 2622 | 682.3 2623 | 682.4 2624 | 682.5 2625 | 682.6 2626 | 682.7 2627 | 682.8 2628 | 682.9 2629 | 683 2630 | 685.1 2631 | 686.01 2632 | 686.09 2633 | 686.1 2634 | 686.9 2635 | 690.10 2636 | 691.8 2637 | 692.4 2638 | 692.6 2639 | 692.82 2640 | 692.9 2641 | 693.0 2642 | 693.1 2643 | 693.8 2644 | 694.5 2645 | 694.8 2646 | 695.1 2647 | 695.2 2648 | 695.3 2649 | 695.4 2650 | 695.89 2651 | 695.9 2652 | 696.0 2653 | 696.1 
2654 | 696.2 2655 | 696.3 2656 | 697.9 2657 | 698.1 2658 | 698.3 2659 | 698.4 2660 | 698.8 2661 | 698.9 2662 | 701.0 2663 | 701.1 2664 | 701.5 2665 | 701.8 2666 | 702.0 2667 | 702.19 2668 | 702.8 2669 | 703.8 2670 | 704.00 2671 | 704.09 2672 | 704.1 2673 | 704.8 2674 | 705.1 2675 | 705.21 2676 | 705.83 2677 | 706.1 2678 | 706.2 2679 | 707.0 2680 | 707.00 2681 | 707.01 2682 | 707.02 2683 | 707.03 2684 | 707.04 2685 | 707.05 2686 | 707.06 2687 | 707.07 2688 | 707.09 2689 | 707.10 2690 | 707.11 2691 | 707.12 2692 | 707.13 2693 | 707.14 2694 | 707.15 2695 | 707.19 2696 | 707.8 2697 | 707.9 2698 | 708.0 2699 | 708.3 2700 | 708.8 2701 | 709.01 2702 | 709.09 2703 | 709.2 2704 | 709.3 2705 | 709.8 2706 | 709.9 2707 | 710.0 2708 | 710.1 2709 | 710.2 2710 | 710.3 2711 | 710.4 2712 | 710.8 2713 | 710.9 2714 | 711.01 2715 | 711.02 2716 | 711.03 2717 | 711.04 2718 | 711.05 2719 | 711.06 2720 | 711.07 2721 | 711.09 2722 | 711.55 2723 | 711.80 2724 | 712.13 2725 | 712.16 2726 | 712.22 2727 | 712.26 2728 | 712.30 2729 | 712.33 2730 | 712.35 2731 | 712.36 2732 | 712.38 2733 | 713.1 2734 | 713.2 2735 | 713.5 2736 | 714.0 2737 | 714.1 2738 | 714.30 2739 | 714.32 2740 | 715.15 2741 | 715.16 2742 | 715.31 2743 | 715.34 2744 | 715.35 2745 | 715.36 2746 | 715.37 2747 | 715.89 2748 | 715.90 2749 | 715.91 2750 | 715.94 2751 | 715.95 2752 | 715.96 2753 | 715.98 2754 | 716.87 2755 | 716.89 2756 | 716.90 2757 | 716.91 2758 | 716.93 2759 | 716.95 2760 | 716.96 2761 | 716.97 2762 | 716.98 2763 | 716.99 2764 | 718.15 2765 | 718.28 2766 | 718.31 2767 | 718.44 2768 | 718.46 2769 | 718.47 2770 | 718.87 2771 | 718.88 2772 | 718.95 2773 | 719.02 2774 | 719.03 2775 | 719.06 2776 | 719.07 2777 | 719.09 2778 | 719.16 2779 | 719.26 2780 | 719.40 2781 | 719.41 2782 | 719.42 2783 | 719.43 2784 | 719.45 2785 | 719.46 2786 | 719.47 2787 | 719.49 2788 | 719.66 2789 | 719.7 2790 | 719.70 2791 | 719.86 2792 | 720.0 2793 | 720.2 2794 | 720.9 2795 | 721.0 2796 | 721.1 2797 | 721.2 2798 | 721.3 2799 | 721.41 2800 | 721.42 2801 | 721.7 2802 | 721.8 2803 | 721.90 2804 | 722.0 2805 | 722.10 2806 | 722.11 2807 | 722.4 2808 | 722.52 2809 | 722.6 2810 | 722.71 2811 | 722.72 2812 | 722.73 2813 | 722.83 2814 | 722.90 2815 | 722.91 2816 | 722.92 2817 | 722.93 2818 | 723.0 2819 | 723.1 2820 | 723.4 2821 | 723.5 2822 | 723.6 2823 | 723.7 2824 | 723.8 2825 | 724.00 2826 | 724.01 2827 | 724.02 2828 | 724.2 2829 | 724.3 2830 | 724.4 2831 | 724.5 2832 | 724.8 2833 | 724.9 2834 | 725 2835 | 726.0 2836 | 726.10 2837 | 726.11 2838 | 726.12 2839 | 726.2 2840 | 726.33 2841 | 726.5 2842 | 726.60 2843 | 726.65 2844 | 726.69 2845 | 726.71 2846 | 726.72 2847 | 726.90 2848 | 726.91 2849 | 727.00 2850 | 727.03 2851 | 727.04 2852 | 727.05 2853 | 727.3 2854 | 727.40 2855 | 727.41 2856 | 727.51 2857 | 727.61 2858 | 727.81 2859 | 727.82 2860 | 727.89 2861 | 728.0 2862 | 728.2 2863 | 728.4 2864 | 728.6 2865 | 728.71 2866 | 728.85 2867 | 728.86 2868 | 728.87 2869 | 728.88 2870 | 728.89 2871 | 728.9 2872 | 729.1 2873 | 729.2 2874 | 729.30 2875 | 729.39 2876 | 729.4 2877 | 729.5 2878 | 729.6 2879 | 729.71 2880 | 729.72 2881 | 729.73 2882 | 729.81 2883 | 729.82 2884 | 729.89 2885 | 729.9 2886 | 730.01 2887 | 730.04 2888 | 730.05 2889 | 730.07 2890 | 730.08 2891 | 730.09 2892 | 730.12 2893 | 730.13 2894 | 730.15 2895 | 730.16 2896 | 730.17 2897 | 730.18 2898 | 730.19 2899 | 730.20 2900 | 730.22 2901 | 730.25 2902 | 730.26 2903 | 730.27 2904 | 730.28 2905 | 730.88 2906 | 730.89 2907 | 731.0 2908 | 731.3 2909 | 731.8 2910 | 732.1 2911 | 732.5 2912 | 733.00 2913 | 733.01 2914 
| 733.02 2915 | 733.09 2916 | 733.11 2917 | 733.13 2918 | 733.14 2919 | 733.15 2920 | 733.16 2921 | 733.19 2922 | 733.20 2923 | 733.29 2924 | 733.42 2925 | 733.49 2926 | 733.6 2927 | 733.81 2928 | 733.82 2929 | 733.90 2930 | 733.99 2931 | 734 2932 | 735.0 2933 | 735.4 2934 | 735.8 2935 | 735.9 2936 | 736.09 2937 | 736.29 2938 | 736.6 2939 | 736.70 2940 | 736.71 2941 | 736.72 2942 | 736.79 2943 | 736.89 2944 | 737.10 2945 | 737.12 2946 | 737.19 2947 | 737.22 2948 | 737.30 2949 | 737.34 2950 | 737.39 2951 | 737.41 2952 | 737.43 2953 | 738.0 2954 | 738.19 2955 | 738.3 2956 | 738.4 2957 | 741.00 2958 | 741.01 2959 | 741.90 2960 | 741.93 2961 | 742.0 2962 | 742.2 2963 | 742.3 2964 | 742.4 2965 | 742.8 2966 | 742.9 2967 | 743.20 2968 | 743.61 2969 | 745.10 2970 | 745.12 2971 | 745.2 2972 | 745.4 2973 | 745.5 2974 | 745.60 2975 | 745.61 2976 | 745.8 2977 | 746.02 2978 | 746.1 2979 | 746.2 2980 | 746.3 2981 | 746.4 2982 | 746.85 2983 | 746.86 2984 | 746.89 2985 | 746.9 2986 | 747.0 2987 | 747.10 2988 | 747.21 2989 | 747.22 2990 | 747.29 2991 | 747.3 2992 | 747.40 2993 | 747.41 2994 | 747.42 2995 | 747.49 2996 | 747.61 2997 | 747.62 2998 | 747.63 2999 | 747.69 3000 | 747.81 3001 | 747.82 3002 | 748.2 3003 | 748.3 3004 | 748.8 3005 | 750.3 3006 | 750.4 3007 | 750.9 3008 | 751.0 3009 | 751.3 3010 | 751.4 3011 | 751.5 3012 | 751.62 3013 | 751.69 3014 | 751.7 3015 | 752.3 3016 | 752.61 3017 | 753.0 3018 | 753.10 3019 | 753.12 3020 | 753.13 3021 | 753.19 3022 | 753.29 3023 | 753.3 3024 | 753.4 3025 | 754.2 3026 | 754.61 3027 | 754.70 3028 | 754.82 3029 | 754.89 3030 | 755.26 3031 | 755.50 3032 | 755.57 3033 | 755.59 3034 | 755.63 3035 | 755.64 3036 | 755.67 3037 | 756.10 3038 | 756.12 3039 | 756.14 3040 | 756.17 3041 | 756.19 3042 | 756.51 3043 | 756.83 3044 | 756.89 3045 | 757.0 3046 | 757.39 3047 | 758.0 3048 | 758.1 3049 | 758.5 3050 | 758.6 3051 | 758.7 3052 | 758.89 3053 | 758.9 3054 | 759.3 3055 | 759.5 3056 | 759.6 3057 | 759.81 3058 | 759.82 3059 | 759.89 3060 | 780.01 3061 | 780.03 3062 | 780.09 3063 | 780.1 3064 | 780.2 3065 | 780.39 3066 | 780.4 3067 | 780.50 3068 | 780.51 3069 | 780.52 3070 | 780.54 3071 | 780.55 3072 | 780.57 3073 | 780.6 3074 | 780.71 3075 | 780.79 3076 | 780.8 3077 | 780.9 3078 | 780.93 3079 | 780.94 3080 | 780.96 3081 | 780.97 3082 | 780.99 3083 | 781.0 3084 | 781.1 3085 | 781.2 3086 | 781.3 3087 | 781.7 3088 | 781.8 3089 | 781.94 3090 | 781.99 3091 | 782.0 3092 | 782.1 3093 | 782.2 3094 | 782.3 3095 | 782.4 3096 | 782.5 3097 | 782.62 3098 | 782.7 3099 | 783.0 3100 | 783.1 3101 | 783.21 3102 | 783.40 3103 | 783.5 3104 | 783.6 3105 | 783.7 3106 | 784.0 3107 | 784.1 3108 | 784.2 3109 | 784.3 3110 | 784.41 3111 | 784.49 3112 | 784.5 3113 | 784.69 3114 | 784.7 3115 | 785.0 3116 | 785.1 3117 | 785.2 3118 | 785.4 3119 | 785.50 3120 | 785.51 3121 | 785.52 3122 | 785.59 3123 | 785.6 3124 | 785.9 3125 | 786.01 3126 | 786.02 3127 | 786.03 3128 | 786.04 3129 | 786.05 3130 | 786.06 3131 | 786.07 3132 | 786.09 3133 | 786.1 3134 | 786.2 3135 | 786.3 3136 | 786.4 3137 | 786.50 3138 | 786.51 3139 | 786.52 3140 | 786.59 3141 | 786.6 3142 | 786.8 3143 | 787.01 3144 | 787.02 3145 | 787.03 3146 | 787.1 3147 | 787.2 3148 | 787.20 3149 | 787.21 3150 | 787.22 3151 | 787.23 3152 | 787.29 3153 | 787.3 3154 | 787.6 3155 | 787.91 3156 | 787.99 3157 | 788.1 3158 | 788.20 3159 | 788.21 3160 | 788.29 3161 | 788.30 3162 | 788.31 3163 | 788.32 3164 | 788.37 3165 | 788.38 3166 | 788.39 3167 | 788.41 3168 | 788.42 3169 | 788.5 3170 | 788.69 3171 | 788.8 3172 | 789.00 3173 | 789.01 3174 | 789.02 3175 | 
789.03 3176 | 789.04 3177 | 789.06 3178 | 789.07 3179 | 789.09 3180 | 789.1 3181 | 789.2 3182 | 789.30 3183 | 789.34 3184 | 789.39 3185 | 789.40 3186 | 789.5 3187 | 789.51 3188 | 789.59 3189 | 790.01 3190 | 790.09 3191 | 790.1 3192 | 790.2 3193 | 790.22 3194 | 790.29 3195 | 790.4 3196 | 790.5 3197 | 790.6 3198 | 790.7 3199 | 790.8 3200 | 790.92 3201 | 790.93 3202 | 790.94 3203 | 790.99 3204 | 791.0 3205 | 791.2 3206 | 791.3 3207 | 791.5 3208 | 791.6 3209 | 791.9 3210 | 792.0 3211 | 792.1 3212 | 792.9 3213 | 793.0 3214 | 793.1 3215 | 793.2 3216 | 793.3 3217 | 793.4 3218 | 793.5 3219 | 793.7 3220 | 793.80 3221 | 794.02 3222 | 794.09 3223 | 794.2 3224 | 794.31 3225 | 794.39 3226 | 794.4 3227 | 794.5 3228 | 794.6 3229 | 794.8 3230 | 794.9 3231 | 795.09 3232 | 795.5 3233 | 795.79 3234 | 795.89 3235 | 796.0 3236 | 796.1 3237 | 796.2 3238 | 796.3 3239 | 796.4 3240 | 799.0 3241 | 799.02 3242 | 799.1 3243 | 799.2 3244 | 799.3 3245 | 799.4 3246 | 799.89 3247 | 800.00 3248 | 800.01 3249 | 800.06 3250 | 800.09 3251 | 800.10 3252 | 800.12 3253 | 800.15 3254 | 800.16 3255 | 800.20 3256 | 800.21 3257 | 800.22 3258 | 800.23 3259 | 800.24 3260 | 800.25 3261 | 800.26 3262 | 800.29 3263 | 800.30 3264 | 800.31 3265 | 800.32 3266 | 800.61 3267 | 800.70 3268 | 800.71 3269 | 800.75 3270 | 800.76 3271 | 800.82 3272 | 800.85 3273 | 801.00 3274 | 801.01 3275 | 801.02 3276 | 801.05 3277 | 801.06 3278 | 801.09 3279 | 801.10 3280 | 801.11 3281 | 801.12 3282 | 801.14 3283 | 801.15 3284 | 801.16 3285 | 801.20 3286 | 801.21 3287 | 801.22 3288 | 801.24 3289 | 801.25 3290 | 801.26 3291 | 801.30 3292 | 801.31 3293 | 801.32 3294 | 801.35 3295 | 801.36 3296 | 801.41 3297 | 801.42 3298 | 801.45 3299 | 801.51 3300 | 801.52 3301 | 801.60 3302 | 801.62 3303 | 801.64 3304 | 801.65 3305 | 801.72 3306 | 801.74 3307 | 801.80 3308 | 801.82 3309 | 801.96 3310 | 802.0 3311 | 802.1 3312 | 802.20 3313 | 802.21 3314 | 802.22 3315 | 802.23 3316 | 802.24 3317 | 802.25 3318 | 802.26 3319 | 802.27 3320 | 802.28 3321 | 802.29 3322 | 802.31 3323 | 802.32 3324 | 802.35 3325 | 802.36 3326 | 802.38 3327 | 802.39 3328 | 802.4 3329 | 802.5 3330 | 802.6 3331 | 802.7 3332 | 802.8 3333 | 802.9 3334 | 803.01 3335 | 803.06 3336 | 803.11 3337 | 803.12 3338 | 803.15 3339 | 803.16 3340 | 803.20 3341 | 803.21 3342 | 803.22 3343 | 803.24 3344 | 803.25 3345 | 803.26 3346 | 803.32 3347 | 803.36 3348 | 803.41 3349 | 803.50 3350 | 803.60 3351 | 803.75 3352 | 804.00 3353 | 804.10 3354 | 804.12 3355 | 804.16 3356 | 804.20 3357 | 804.21 3358 | 804.22 3359 | 804.23 3360 | 804.25 3361 | 804.26 3362 | 804.30 3363 | 804.32 3364 | 804.35 3365 | 804.36 3366 | 804.41 3367 | 804.66 3368 | 804.70 3369 | 804.73 3370 | 804.85 3371 | 805.00 3372 | 805.01 3373 | 805.02 3374 | 805.03 3375 | 805.04 3376 | 805.05 3377 | 805.06 3378 | 805.07 3379 | 805.08 3380 | 805.2 3381 | 805.4 3382 | 805.6 3383 | 805.8 3384 | 806.00 3385 | 806.01 3386 | 806.02 3387 | 806.03 3388 | 806.04 3389 | 806.05 3390 | 806.06 3391 | 806.07 3392 | 806.08 3393 | 806.09 3394 | 806.16 3395 | 806.20 3396 | 806.21 3397 | 806.22 3398 | 806.23 3399 | 806.24 3400 | 806.25 3401 | 806.26 3402 | 806.29 3403 | 806.31 3404 | 806.39 3405 | 806.4 3406 | 806.5 3407 | 806.60 3408 | 806.62 3409 | 806.8 3410 | 807.00 3411 | 807.01 3412 | 807.02 3413 | 807.03 3414 | 807.04 3415 | 807.05 3416 | 807.06 3417 | 807.07 3418 | 807.08 3419 | 807.09 3420 | 807.10 3421 | 807.2 3422 | 807.3 3423 | 807.4 3424 | 807.5 3425 | 808.0 3426 | 808.1 3427 | 808.2 3428 | 808.3 3429 | 808.41 3430 | 808.42 3431 | 808.43 3432 | 808.49 3433 | 808.51 
3434 | 808.53 3435 | 808.8 3436 | 808.9 3437 | 810.00 3438 | 810.01 3439 | 810.02 3440 | 810.03 3441 | 810.10 3442 | 811.00 3443 | 811.01 3444 | 811.02 3445 | 811.03 3446 | 811.09 3447 | 811.10 3448 | 812.00 3449 | 812.01 3450 | 812.02 3451 | 812.03 3452 | 812.09 3453 | 812.10 3454 | 812.12 3455 | 812.19 3456 | 812.20 3457 | 812.21 3458 | 812.30 3459 | 812.31 3460 | 812.40 3461 | 812.41 3462 | 812.42 3463 | 812.43 3464 | 812.44 3465 | 812.49 3466 | 812.50 3467 | 812.51 3468 | 812.52 3469 | 812.59 3470 | 813.01 3471 | 813.02 3472 | 813.03 3473 | 813.05 3474 | 813.07 3475 | 813.08 3476 | 813.11 3477 | 813.18 3478 | 813.21 3479 | 813.22 3480 | 813.23 3481 | 813.31 3482 | 813.32 3483 | 813.33 3484 | 813.41 3485 | 813.42 3486 | 813.43 3487 | 813.44 3488 | 813.51 3489 | 813.52 3490 | 813.54 3491 | 813.81 3492 | 813.82 3493 | 813.83 3494 | 813.91 3495 | 813.92 3496 | 813.93 3497 | 814.00 3498 | 814.01 3499 | 814.02 3500 | 814.03 3501 | 814.05 3502 | 814.06 3503 | 814.07 3504 | 814.08 3505 | 814.09 3506 | 814.12 3507 | 814.18 3508 | 814.19 3509 | 815.00 3510 | 815.01 3511 | 815.02 3512 | 815.03 3513 | 815.04 3514 | 815.09 3515 | 815.10 3516 | 815.11 3517 | 815.12 3518 | 815.13 3519 | 815.14 3520 | 815.19 3521 | 816.00 3522 | 816.01 3523 | 816.02 3524 | 816.03 3525 | 816.11 3526 | 816.12 3527 | 816.13 3528 | 817.0 3529 | 820.01 3530 | 820.02 3531 | 820.03 3532 | 820.09 3533 | 820.19 3534 | 820.20 3535 | 820.21 3536 | 820.22 3537 | 820.32 3538 | 820.8 3539 | 820.9 3540 | 821.00 3541 | 821.01 3542 | 821.10 3543 | 821.11 3544 | 821.20 3545 | 821.21 3546 | 821.22 3547 | 821.23 3548 | 821.29 3549 | 821.30 3550 | 821.31 3551 | 821.32 3552 | 821.33 3553 | 821.39 3554 | 822.0 3555 | 822.1 3556 | 823.00 3557 | 823.01 3558 | 823.02 3559 | 823.10 3560 | 823.12 3561 | 823.20 3562 | 823.21 3563 | 823.22 3564 | 823.30 3565 | 823.32 3566 | 823.42 3567 | 823.80 3568 | 823.81 3569 | 823.82 3570 | 823.90 3571 | 823.92 3572 | 824.0 3573 | 824.1 3574 | 824.2 3575 | 824.3 3576 | 824.4 3577 | 824.5 3578 | 824.6 3579 | 824.7 3580 | 824.8 3581 | 824.9 3582 | 825.0 3583 | 825.1 3584 | 825.21 3585 | 825.22 3586 | 825.23 3587 | 825.25 3588 | 825.29 3589 | 825.31 3590 | 825.35 3591 | 826.0 3592 | 826.1 3593 | 828.0 3594 | 828.1 3595 | 830.0 3596 | 831.00 3597 | 831.01 3598 | 831.03 3599 | 831.04 3600 | 831.09 3601 | 831.14 3602 | 832.00 3603 | 832.02 3604 | 832.09 3605 | 833.02 3606 | 833.05 3607 | 833.11 3608 | 834.02 3609 | 835.00 3610 | 835.01 3611 | 836.0 3612 | 836.1 3613 | 836.2 3614 | 836.3 3615 | 836.50 3616 | 836.51 3617 | 836.52 3618 | 836.61 3619 | 837.1 3620 | 838.05 3621 | 838.09 3622 | 838.19 3623 | 839.00 3624 | 839.01 3625 | 839.02 3626 | 839.03 3627 | 839.04 3628 | 839.05 3629 | 839.06 3630 | 839.08 3631 | 839.20 3632 | 839.21 3633 | 839.42 3634 | 839.61 3635 | 839.69 3636 | 839.79 3637 | 840.3 3638 | 840.4 3639 | 840.8 3640 | 840.9 3641 | 841.1 3642 | 841.8 3643 | 842.00 3644 | 842.09 3645 | 843.9 3646 | 844.0 3647 | 844.1 3648 | 844.2 3649 | 844.8 3650 | 844.9 3651 | 845.00 3652 | 845.03 3653 | 846.0 3654 | 847.0 3655 | 847.1 3656 | 847.2 3657 | 847.9 3658 | 850.0 3659 | 850.1 3660 | 850.11 3661 | 850.2 3662 | 850.4 3663 | 850.5 3664 | 850.9 3665 | 851.00 3666 | 851.01 3667 | 851.02 3668 | 851.05 3669 | 851.09 3670 | 851.31 3671 | 851.35 3672 | 851.40 3673 | 851.41 3674 | 851.42 3675 | 851.44 3676 | 851.45 3677 | 851.46 3678 | 851.73 3679 | 851.75 3680 | 851.80 3681 | 851.81 3682 | 851.82 3683 | 851.84 3684 | 851.85 3685 | 851.86 3686 | 851.89 3687 | 851.90 3688 | 851.96 3689 | 852.00 3690 | 852.01 3691 | 
852.02 3692 | 852.03 3693 | 852.05 3694 | 852.06 3695 | 852.09 3696 | 852.10 3697 | 852.11 3698 | 852.12 3699 | 852.15 3700 | 852.19 3701 | 852.20 3702 | 852.21 3703 | 852.22 3704 | 852.25 3705 | 852.26 3706 | 852.29 3707 | 852.31 3708 | 852.36 3709 | 852.39 3710 | 852.40 3711 | 852.41 3712 | 852.42 3713 | 852.46 3714 | 853.00 3715 | 853.01 3716 | 853.02 3717 | 853.04 3718 | 853.05 3719 | 853.06 3720 | 853.09 3721 | 853.10 3722 | 853.14 3723 | 854.00 3724 | 854.02 3725 | 854.04 3726 | 854.05 3727 | 854.06 3728 | 860.0 3729 | 860.1 3730 | 860.2 3731 | 860.3 3732 | 860.4 3733 | 860.5 3734 | 861.00 3735 | 861.01 3736 | 861.10 3737 | 861.12 3738 | 861.13 3739 | 861.21 3740 | 861.22 3741 | 861.30 3742 | 861.31 3743 | 861.32 3744 | 862.0 3745 | 862.1 3746 | 862.22 3747 | 862.29 3748 | 862.32 3749 | 862.39 3750 | 862.9 3751 | 863.0 3752 | 863.1 3753 | 863.20 3754 | 863.21 3755 | 863.29 3756 | 863.30 3757 | 863.31 3758 | 863.39 3759 | 863.40 3760 | 863.42 3761 | 863.43 3762 | 863.44 3763 | 863.45 3764 | 863.49 3765 | 863.50 3766 | 863.52 3767 | 863.53 3768 | 863.54 3769 | 863.55 3770 | 863.81 3771 | 863.89 3772 | 863.93 3773 | 863.99 3774 | 864.00 3775 | 864.01 3776 | 864.02 3777 | 864.03 3778 | 864.04 3779 | 864.05 3780 | 864.09 3781 | 864.11 3782 | 864.12 3783 | 864.13 3784 | 864.14 3785 | 864.15 3786 | 864.19 3787 | 865.00 3788 | 865.01 3789 | 865.02 3790 | 865.03 3791 | 865.04 3792 | 865.09 3793 | 865.11 3794 | 865.12 3795 | 865.13 3796 | 865.14 3797 | 866.00 3798 | 866.01 3799 | 866.02 3800 | 866.03 3801 | 866.10 3802 | 866.11 3803 | 866.12 3804 | 866.13 3805 | 867.0 3806 | 867.1 3807 | 867.2 3808 | 867.6 3809 | 867.7 3810 | 867.8 3811 | 867.9 3812 | 868.01 3813 | 868.02 3814 | 868.03 3815 | 868.04 3816 | 868.09 3817 | 868.11 3818 | 868.12 3819 | 868.13 3820 | 868.14 3821 | 868.19 3822 | 869.0 3823 | 870.0 3824 | 870.1 3825 | 870.2 3826 | 870.3 3827 | 870.8 3828 | 871.0 3829 | 871.1 3830 | 871.2 3831 | 871.3 3832 | 871.6 3833 | 872.00 3834 | 872.01 3835 | 872.02 3836 | 872.61 3837 | 872.8 3838 | 873.0 3839 | 873.1 3840 | 873.20 3841 | 873.21 3842 | 873.22 3843 | 873.30 3844 | 873.40 3845 | 873.41 3846 | 873.42 3847 | 873.43 3848 | 873.44 3849 | 873.49 3850 | 873.50 3851 | 873.51 3852 | 873.52 3853 | 873.53 3854 | 873.54 3855 | 873.59 3856 | 873.60 3857 | 873.61 3858 | 873.63 3859 | 873.64 3860 | 873.65 3861 | 873.71 3862 | 873.73 3863 | 873.74 3864 | 873.8 3865 | 874.02 3866 | 874.11 3867 | 874.12 3868 | 874.2 3869 | 874.4 3870 | 874.8 3871 | 874.9 3872 | 875.0 3873 | 875.1 3874 | 876.0 3875 | 876.1 3876 | 877.0 3877 | 878.0 3878 | 878.2 3879 | 878.5 3880 | 878.6 3881 | 878.7 3882 | 879.0 3883 | 879.1 3884 | 879.2 3885 | 879.3 3886 | 879.4 3887 | 879.5 3888 | 879.6 3889 | 879.7 3890 | 879.8 3891 | 879.9 3892 | 880.00 3893 | 880.01 3894 | 880.02 3895 | 880.03 3896 | 880.10 3897 | 880.12 3898 | 880.13 3899 | 880.19 3900 | 880.20 3901 | 880.23 3902 | 881.00 3903 | 881.01 3904 | 881.02 3905 | 881.10 3906 | 881.11 3907 | 881.12 3908 | 881.20 3909 | 881.21 3910 | 881.22 3911 | 882.0 3912 | 882.1 3913 | 882.2 3914 | 883.0 3915 | 883.1 3916 | 883.2 3917 | 884.0 3918 | 885.0 3919 | 885.1 3920 | 886.0 3921 | 886.1 3922 | 887.0 3923 | 887.1 3924 | 887.2 3925 | 887.3 3926 | 887.5 3927 | 890.0 3928 | 890.1 3929 | 891.0 3930 | 891.1 3931 | 891.2 3932 | 892.0 3933 | 892.1 3934 | 892.2 3935 | 893.0 3936 | 894.0 3937 | 896.0 3938 | 897.0 3939 | 897.2 3940 | 897.3 3941 | 897.7 3942 | 900.01 3943 | 900.02 3944 | 900.03 3945 | 900.1 3946 | 900.81 3947 | 900.82 3948 | 900.89 3949 | 900.9 3950 | 901.0 3951 | 901.1 
3952 | 901.2 3953 | 901.3 3954 | 901.40 3955 | 901.41 3956 | 901.42 3957 | 901.82 3958 | 901.9 3959 | 902.0 3960 | 902.20 3961 | 902.21 3962 | 902.22 3963 | 902.23 3964 | 902.26 3965 | 902.29 3966 | 902.33 3967 | 902.34 3968 | 902.41 3969 | 902.51 3970 | 902.53 3971 | 902.54 3972 | 902.87 3973 | 902.89 3974 | 902.9 3975 | 903.01 3976 | 903.1 3977 | 903.2 3978 | 903.3 3979 | 903.4 3980 | 903.5 3981 | 903.8 3982 | 903.9 3983 | 904.0 3984 | 904.1 3985 | 904.2 3986 | 904.3 3987 | 904.41 3988 | 904.42 3989 | 904.53 3990 | 904.6 3991 | 904.7 3992 | 904.8 3993 | 905.0 3994 | 905.1 3995 | 905.2 3996 | 905.3 3997 | 905.4 3998 | 906.1 3999 | 906.4 4000 | 906.7 4001 | 906.8 4002 | 907.0 4003 | 907.2 4004 | 907.5 4005 | 908.0 4006 | 908.1 4007 | 908.2 4008 | 908.9 4009 | 909.0 4010 | 909.2 4011 | 909.3 4012 | 909.4 4013 | 910.0 4014 | 910.8 4015 | 911.0 4016 | 911.2 4017 | 912.0 4018 | 913.0 4019 | 914.0 4020 | 914.9 4021 | 916.0 4022 | 916.1 4023 | 916.2 4024 | 916.4 4025 | 917.1 4026 | 917.2 4027 | 918.0 4028 | 918.1 4029 | 919.0 4030 | 919.1 4031 | 919.8 4032 | 920 4033 | 921.0 4034 | 921.1 4035 | 921.2 4036 | 921.3 4037 | 921.9 4038 | 922.0 4039 | 922.1 4040 | 922.2 4041 | 922.31 4042 | 922.32 4043 | 922.4 4044 | 922.8 4045 | 923.00 4046 | 923.01 4047 | 923.03 4048 | 923.10 4049 | 923.11 4050 | 923.20 4051 | 923.3 4052 | 923.8 4053 | 923.9 4054 | 924.00 4055 | 924.01 4056 | 924.10 4057 | 924.11 4058 | 924.21 4059 | 924.3 4060 | 924.5 4061 | 924.8 4062 | 924.9 4063 | 926.0 4064 | 926.12 4065 | 926.19 4066 | 927.10 4067 | 927.20 4068 | 927.21 4069 | 927.3 4070 | 927.8 4071 | 928.00 4072 | 928.01 4073 | 928.10 4074 | 928.11 4075 | 928.20 4076 | 930.1 4077 | 930.8 4078 | 932 4079 | 933.0 4080 | 933.1 4081 | 934.0 4082 | 934.1 4083 | 934.8 4084 | 934.9 4085 | 935.1 4086 | 935.2 4087 | 936 4088 | 938 4089 | 939.2 4090 | 941.27 4091 | 941.28 4092 | 942.03 4093 | 942.04 4094 | 942.14 4095 | 942.24 4096 | 942.34 4097 | 943.32 4098 | 944.20 4099 | 945.22 4100 | 945.32 4101 | 945.34 4102 | 945.36 4103 | 946.2 4104 | 947.1 4105 | 948.00 4106 | 948.40 4107 | 950.0 4108 | 950.9 4109 | 951.0 4110 | 951.3 4111 | 951.4 4112 | 951.5 4113 | 951.8 4114 | 952.00 4115 | 952.02 4116 | 952.03 4117 | 952.04 4118 | 952.05 4119 | 952.06 4120 | 952.08 4121 | 952.09 4122 | 952.14 4123 | 952.15 4124 | 952.3 4125 | 952.4 4126 | 952.8 4127 | 952.9 4128 | 953.0 4129 | 953.4 4130 | 953.9 4131 | 955.1 4132 | 955.2 4133 | 955.3 4134 | 955.5 4135 | 955.7 4136 | 955.8 4137 | 955.9 4138 | 956.1 4139 | 956.2 4140 | 956.3 4141 | 956.9 4142 | 957.0 4143 | 957.1 4144 | 957.8 4145 | 957.9 4146 | 958.0 4147 | 958.1 4148 | 958.2 4149 | 958.3 4150 | 958.4 4151 | 958.5 4152 | 958.7 4153 | 958.8 4154 | 958.91 4155 | 958.92 4156 | 958.93 4157 | 958.99 4158 | 959.01 4159 | 959.09 4160 | 959.11 4161 | 959.12 4162 | 959.14 4163 | 959.3 4164 | 959.7 4165 | 959.8 4166 | 959.9 4167 | 960.4 4168 | 960.5 4169 | 961.4 4170 | 961.8 4171 | 962.3 4172 | 962.7 4173 | 963.0 4174 | 963.1 4175 | 964.2 4176 | 965.00 4177 | 965.01 4178 | 965.02 4179 | 965.09 4180 | 965.1 4181 | 965.4 4182 | 965.61 4183 | 965.8 4184 | 966.1 4185 | 966.3 4186 | 966.4 4187 | 967.0 4188 | 967.1 4189 | 967.8 4190 | 967.9 4191 | 968.0 4192 | 968.4 4193 | 968.5 4194 | 969.0 4195 | 969.1 4196 | 969.3 4197 | 969.4 4198 | 969.5 4199 | 969.6 4200 | 969.7 4201 | 969.8 4202 | 970.1 4203 | 970.8 4204 | 971.0 4205 | 971.1 4206 | 971.2 4207 | 971.3 4208 | 972.0 4209 | 972.1 4210 | 972.2 4211 | 972.4 4212 | 972.6 4213 | 972.9 4214 | 975.2 4215 | 975.3 4216 | 975.4 4217 | 976.0 4218 | 976.6 4219 | 
976.7 4220 | 977.8 4221 | 977.9 4222 | 980.0 4223 | 980.3 4224 | 980.9 4225 | 982.8 4226 | 983.1 4227 | 983.2 4228 | 983.9 4229 | 985.1 4230 | 985.8 4231 | 986 4232 | 987.8 4233 | 988.1 4234 | 989.0 4235 | 989.3 4236 | 989.4 4237 | 989.5 4238 | 989.89 4239 | 989.9 4240 | 990 4241 | 991.1 4242 | 991.2 4243 | 991.3 4244 | 991.6 4245 | 992.0 4246 | 994.1 4247 | 994.2 4248 | 994.7 4249 | 994.8 4250 | 995.0 4251 | 995.1 4252 | 995.2 4253 | 995.27 4254 | 995.29 4255 | 995.3 4256 | 995.62 4257 | 995.64 4258 | 995.7 4259 | 995.80 4260 | 995.81 4261 | 995.83 4262 | 995.89 4263 | 995.90 4264 | 995.91 4265 | 995.92 4266 | 995.93 4267 | 995.94 4268 | 996.01 4269 | 996.02 4270 | 996.03 4271 | 996.04 4272 | 996.09 4273 | 996.1 4274 | 996.2 4275 | 996.31 4276 | 996.39 4277 | 996.4 4278 | 996.40 4279 | 996.41 4280 | 996.42 4281 | 996.43 4282 | 996.44 4283 | 996.45 4284 | 996.47 4285 | 996.49 4286 | 996.52 4287 | 996.53 4288 | 996.54 4289 | 996.56 4290 | 996.57 4291 | 996.59 4292 | 996.61 4293 | 996.62 4294 | 996.63 4295 | 996.64 4296 | 996.65 4297 | 996.66 4298 | 996.67 4299 | 996.68 4300 | 996.69 4301 | 996.71 4302 | 996.72 4303 | 996.73 4304 | 996.74 4305 | 996.75 4306 | 996.76 4307 | 996.77 4308 | 996.78 4309 | 996.79 4310 | 996.81 4311 | 996.82 4312 | 996.83 4313 | 996.84 4314 | 996.85 4315 | 996.86 4316 | 996.89 4317 | 996.93 4318 | 997.01 4319 | 997.02 4320 | 997.09 4321 | 997.1 4322 | 997.2 4323 | 997.3 4324 | 997.4 4325 | 997.5 4326 | 997.62 4327 | 997.69 4328 | 997.71 4329 | 997.72 4330 | 997.79 4331 | 997.91 4332 | 997.99 4333 | 998.0 4334 | 998.11 4335 | 998.12 4336 | 998.13 4337 | 998.2 4338 | 998.3 4339 | 998.31 4340 | 998.32 4341 | 998.4 4342 | 998.51 4343 | 998.59 4344 | 998.6 4345 | 998.81 4346 | 998.83 4347 | 998.89 4348 | 998.9 4349 | 999.1 4350 | 999.2 4351 | 999.3 4352 | 999.31 4353 | 999.39 4354 | 999.5 4355 | 999.6 4356 | 999.8 4357 | 999.9 4358 | E800.2 4359 | E801.2 4360 | E804.2 4361 | E806.2 4362 | E811.0 4363 | E812.0 4364 | E812.1 4365 | E812.2 4366 | E812.3 4367 | E812.6 4368 | E812.7 4369 | E812.9 4370 | E813.0 4371 | E813.1 4372 | E813.2 4373 | E813.3 4374 | E813.6 4375 | E814.0 4376 | E814.1 4377 | E814.2 4378 | E814.6 4379 | E814.7 4380 | E815.0 4381 | E815.1 4382 | E815.2 4383 | E815.6 4384 | E815.9 4385 | E816.0 4386 | E816.1 4387 | E816.2 4388 | E816.3 4389 | E818.0 4390 | E818.1 4391 | E818.2 4392 | E818.7 4393 | E819.0 4394 | E819.1 4395 | E819.2 4396 | E819.3 4397 | E819.6 4398 | E819.7 4399 | E819.9 4400 | E820.0 4401 | E821.0 4402 | E821.1 4403 | E821.2 4404 | E821.6 4405 | E821.7 4406 | E822.7 4407 | E822.8 4408 | E823.0 4409 | E823.1 4410 | E823.2 4411 | E823.3 4412 | E823.7 4413 | E823.8 4414 | E824.1 4415 | E824.2 4416 | E825.0 4417 | E825.1 4418 | E825.2 4419 | E825.7 4420 | E826.0 4421 | E826.1 4422 | E828.2 4423 | E829.8 4424 | E831.1 4425 | E834.1 4426 | E834.3 4427 | E834.8 4428 | E835.3 4429 | E838.4 4430 | E840.5 4431 | E841.5 4432 | E848 4433 | E849.0 4434 | E849.3 4435 | E849.4 4436 | E849.5 4437 | E849.6 4438 | E849.7 4439 | E849.8 4440 | E849.9 4441 | E850.0 4442 | E850.1 4443 | E850.2 4444 | E850.3 4445 | E850.4 4446 | E850.8 4447 | E851 4448 | E852.8 4449 | E852.9 4450 | E853.2 4451 | E853.8 4452 | E854.0 4453 | E854.1 4454 | E854.2 4455 | E854.3 4456 | E854.8 4457 | E855.0 4458 | E855.1 4459 | E855.2 4460 | E855.5 4461 | E855.6 4462 | E858.0 4463 | E858.1 4464 | E858.2 4465 | E858.3 4466 | E858.6 4467 | E858.7 4468 | E858.8 4469 | E858.9 4470 | E860.0 4471 | E860.4 4472 | E860.9 4473 | E861.3 4474 | E862.4 4475 | E864.1 4476 | E865.4 4477 | E865.5 
4478 | E866.3 4479 | E866.8 4480 | E869.8 4481 | E870.0 4482 | E870.2 4483 | E870.4 4484 | E870.5 4485 | E870.6 4486 | E870.8 4487 | E871.4 4488 | E871.6 4489 | E871.7 4490 | E871.8 4491 | E874.4 4492 | E876.4 4493 | E876.8 4494 | E876.9 4495 | E878.0 4496 | E878.1 4497 | E878.2 4498 | E878.3 4499 | E878.4 4500 | E878.5 4501 | E878.6 4502 | E878.8 4503 | E878.9 4504 | E879.0 4505 | E879.1 4506 | E879.2 4507 | E879.3 4508 | E879.4 4509 | E879.6 4510 | E879.7 4511 | E879.8 4512 | E879.9 4513 | E880.0 4514 | E880.1 4515 | E880.9 4516 | E881.0 4517 | E881.1 4518 | E882 4519 | E883.0 4520 | E884.1 4521 | E884.2 4522 | E884.3 4523 | E884.4 4524 | E884.5 4525 | E884.6 4526 | E884.9 4527 | E885.0 4528 | E885.1 4529 | E885.2 4530 | E885.3 4531 | E885.4 4532 | E885.9 4533 | E886.0 4534 | E887 4535 | E888 4536 | E888.0 4537 | E888.1 4538 | E888.8 4539 | E888.9 4540 | E891.8 4541 | E899 4542 | E900.0 4543 | E901.0 4544 | E901.1 4545 | E901.8 4546 | E901.9 4547 | E905.3 4548 | E905.5 4549 | E906.4 4550 | E906.8 4551 | E908.1 4552 | E910.2 4553 | E910.8 4554 | E910.9 4555 | E911 4556 | E912 4557 | E915 4558 | E916 4559 | E917.0 4560 | E917.3 4561 | E917.4 4562 | E917.5 4563 | E917.7 4564 | E917.8 4565 | E917.9 4566 | E918 4567 | E919.0 4568 | E919.2 4569 | E919.3 4570 | E919.4 4571 | E919.6 4572 | E919.8 4573 | E920.1 4574 | E920.4 4575 | E920.8 4576 | E920.9 4577 | E922.0 4578 | E922.2 4579 | E922.5 4580 | E922.9 4581 | E923.8 4582 | E924.0 4583 | E924.1 4584 | E924.8 4585 | E924.9 4586 | E927 4587 | E928.3 4588 | E928.8 4589 | E928.9 4590 | E929.0 4591 | E929.1 4592 | E929.2 4593 | E929.3 4594 | E929.8 4595 | E929.9 4596 | E930.0 4597 | E930.1 4598 | E930.4 4599 | E930.5 4600 | E930.7 4601 | E930.8 4602 | E930.9 4603 | E931.0 4604 | E931.3 4605 | E931.4 4606 | E931.5 4607 | E931.7 4608 | E931.8 4609 | E931.9 4610 | E932.0 4611 | E932.2 4612 | E932.3 4613 | E932.5 4614 | E932.8 4615 | E933.0 4616 | E933.1 4617 | E933.8 4618 | E934.2 4619 | E934.4 4620 | E934.5 4621 | E934.6 4622 | E934.7 4623 | E934.8 4624 | E935.1 4625 | E935.2 4626 | E935.3 4627 | E935.4 4628 | E935.6 4629 | E935.7 4630 | E935.8 4631 | E935.9 4632 | E936.0 4633 | E936.1 4634 | E936.3 4635 | E936.4 4636 | E937.0 4637 | E937.8 4638 | E937.9 4639 | E938.0 4640 | E938.3 4641 | E938.4 4642 | E938.5 4643 | E938.7 4644 | E938.9 4645 | E939.0 4646 | E939.1 4647 | E939.2 4648 | E939.3 4649 | E939.4 4650 | E939.7 4651 | E939.8 4652 | E940.1 4653 | E940.8 4654 | E941.0 4655 | E941.1 4656 | E941.2 4657 | E941.3 4658 | E942.0 4659 | E942.1 4660 | E942.2 4661 | E942.4 4662 | E942.5 4663 | E942.6 4664 | E942.9 4665 | E943.0 4666 | E943.3 4667 | E943.8 4668 | E944.1 4669 | E944.3 4670 | E944.4 4671 | E944.5 4672 | E944.7 4673 | E945.1 4674 | E945.2 4675 | E945.7 4676 | E947.1 4677 | E947.8 4678 | E947.9 4679 | E949.6 4680 | E949.9 4681 | E950.0 4682 | E950.1 4683 | E950.2 4684 | E950.3 4685 | E950.4 4686 | E950.6 4687 | E950.7 4688 | E950.9 4689 | E953.0 4690 | E953.8 4691 | E954 4692 | E955.0 4693 | E955.4 4694 | E956 4695 | E957.0 4696 | E957.1 4697 | E957.2 4698 | E957.9 4699 | E958.0 4700 | E958.1 4701 | E958.5 4702 | E958.8 4703 | E958.9 4704 | E959 4705 | E960.0 4706 | E962.0 4707 | E963 4708 | E964 4709 | E965.0 4710 | E965.1 4711 | E965.4 4712 | E965.9 4713 | E966 4714 | E967.3 4715 | E967.4 4716 | E967.7 4717 | E968.1 4718 | E968.2 4719 | E968.7 4720 | E968.8 4721 | E968.9 4722 | E969 4723 | E970 4724 | E980.0 4725 | E980.3 4726 | E980.4 4727 | E980.5 4728 | E980.9 4729 | E985.0 4730 | E986 4731 | E987.1 4732 | E988.8 4733 | E988.9 4734 | 
E989 4735 | V01.1 4736 | V01.7 4737 | V01.79 4738 | V01.89 4739 | V02.3 4740 | V02.51 4741 | V02.59 4742 | V02.61 4743 | V02.62 4744 | V03.82 4745 | V05.8 4746 | V07.1 4747 | V07.4 4748 | V08 4749 | V09.0 4750 | V09.71 4751 | V09.80 4752 | V09.81 4753 | V09.91 4754 | V10.00 4755 | V10.01 4756 | V10.02 4757 | V10.03 4758 | V10.04 4759 | V10.05 4760 | V10.06 4761 | V10.07 4762 | V10.09 4763 | V10.11 4764 | V10.12 4765 | V10.20 4766 | V10.21 4767 | V10.29 4768 | V10.3 4769 | V10.41 4770 | V10.42 4771 | V10.43 4772 | V10.44 4773 | V10.46 4774 | V10.47 4775 | V10.49 4776 | V10.50 4777 | V10.51 4778 | V10.52 4779 | V10.53 4780 | V10.59 4781 | V10.60 4782 | V10.61 4783 | V10.62 4784 | V10.69 4785 | V10.72 4786 | V10.79 4787 | V10.81 4788 | V10.82 4789 | V10.83 4790 | V10.84 4791 | V10.85 4792 | V10.87 4793 | V10.88 4794 | V10.89 4795 | V10.9 4796 | V11.0 4797 | V11.1 4798 | V11.3 4799 | V11.8 4800 | V12.01 4801 | V12.02 4802 | V12.03 4803 | V12.09 4804 | V12.2 4805 | V12.41 4806 | V12.42 4807 | V12.49 4808 | V12.50 4809 | V12.51 4810 | V12.52 4811 | V12.53 4812 | V12.54 4813 | V12.59 4814 | V12.71 4815 | V12.72 4816 | V12.79 4817 | V13.01 4818 | V13.02 4819 | V13.09 4820 | V13.5 4821 | V13.8 4822 | V14.0 4823 | V14.2 4824 | V14.5 4825 | V14.6 4826 | V14.8 4827 | V15.02 4828 | V15.07 4829 | V15.08 4830 | V15.09 4831 | V15.1 4832 | V15.2 4833 | V15.3 4834 | V15.41 4835 | V15.5 4836 | V15.81 4837 | V15.82 4838 | V15.84 4839 | V15.86 4840 | V15.88 4841 | V15.89 4842 | V16.0 4843 | V16.1 4844 | V16.2 4845 | V16.3 4846 | V16.41 4847 | V16.42 4848 | V16.49 4849 | V16.51 4850 | V16.59 4851 | V16.6 4852 | V16.7 4853 | V16.8 4854 | V16.9 4855 | V17.0 4856 | V17.1 4857 | V17.3 4858 | V17.4 4859 | V17.49 4860 | V17.5 4861 | V18.0 4862 | V18.1 4863 | V18.19 4864 | V18.2 4865 | V18.3 4866 | V18.51 4867 | V18.59 4868 | V18.69 4869 | V19.5 4870 | V19.8 4871 | V22.2 4872 | V23.0 4873 | V23.7 4874 | V23.9 4875 | V25.2 4876 | V26.52 4877 | V27.0 4878 | V27.1 4879 | V27.2 4880 | V27.4 4881 | V40.0 4882 | V40.3 4883 | V42.0 4884 | V42.1 4885 | V42.2 4886 | V42.5 4887 | V42.7 4888 | V42.81 4889 | V42.82 4890 | V42.83 4891 | V42.89 4892 | V43.1 4893 | V43.3 4894 | V43.4 4895 | V43.61 4896 | V43.64 4897 | V43.65 4898 | V44.0 4899 | V44.1 4900 | V44.2 4901 | V44.3 4902 | V44.4 4903 | V44.50 4904 | V44.59 4905 | V44.6 4906 | V44.8 4907 | V44.9 4908 | V45.01 4909 | V45.02 4910 | V45.09 4911 | V45.1 4912 | V45.2 4913 | V45.3 4914 | V45.4 4915 | V45.61 4916 | V45.71 4917 | V45.72 4918 | V45.73 4919 | V45.74 4920 | V45.76 4921 | V45.77 4922 | V45.78 4923 | V45.79 4924 | V45.81 4925 | V45.82 4926 | V45.85 4927 | V45.86 4928 | V45.89 4929 | V46.1 4930 | V46.11 4931 | V46.2 4932 | V46.8 4933 | V49.60 4934 | V49.62 4935 | V49.65 4936 | V49.71 4937 | V49.72 4938 | V49.73 4939 | V49.75 4940 | V49.76 4941 | V49.81 4942 | V49.83 4943 | V49.84 4944 | V50.41 4945 | V51 4946 | V53.09 4947 | V53.31 4948 | V53.32 4949 | V53.39 4950 | V53.91 4951 | V54.01 4952 | V54.11 4953 | V54.13 4954 | V54.16 4955 | V54.17 4956 | V54.19 4957 | V54.27 4958 | V54.81 4959 | V54.89 4960 | V55.0 4961 | V55.1 4962 | V55.2 4963 | V55.3 4964 | V55.4 4965 | V55.5 4966 | V55.6 4967 | V55.8 4968 | V56.0 4969 | V58.0 4970 | V58.1 4971 | V58.11 4972 | V58.12 4973 | V58.41 4974 | V58.42 4975 | V58.49 4976 | V58.61 4977 | V58.62 4978 | V58.63 4979 | V58.64 4980 | V58.65 4981 | V58.66 4982 | V58.67 4983 | V58.69 4984 | V58.81 4985 | V58.83 4986 | V59.6 4987 | V60.0 4988 | V60.2 4989 | V60.8 4990 | V61.0 4991 | V61.10 4992 | V61.11 4993 | V61.29 4994 | V61.8 4995 | 
V62.0 4996 | V62.4 4997 | V62.5 4998 | V62.6 4999 | V62.82 5000 | V62.84 5001 | V62.89 5002 | V63.2 5003 | V63.8 5004 | V64.1 5005 | V64.2 5006 | V64.3 5007 | V64.4 5008 | V64.41 5009 | V64.42 5010 | V64.43 5011 | V65.2 5012 | V65.49 5013 | V66.7 5014 | V69.4 5015 | V70.7 5016 | V70.8 5017 | V71.2 5018 | V71.4 5019 | V71.6 5020 | V84.01 5021 | V85.0 5022 | V85.1 5023 | V85.22 5024 | V85.23 5025 | V85.24 5026 | V85.30 5027 | V85.31 5028 | V85.32 5029 | V85.33 5030 | V85.37 5031 | V85.4 5032 | -------------------------------------------------------------------------------- /data/mimic3/ALL_CODES_50.txt: -------------------------------------------------------------------------------- 1 | 038.9 2 | 244.9 3 | 250.00 4 | 272.0 5 | 272.4 6 | 276.1 7 | 276.2 8 | 285.1 9 | 285.9 10 | 287.5 11 | 305.1 12 | 311 13 | 33.24 14 | 36.15 15 | 37.22 16 | 37.23 17 | 38.91 18 | 38.93 19 | 39.61 20 | 39.95 21 | 401.9 22 | 403.90 23 | 410.71 24 | 412 25 | 414.01 26 | 424.0 27 | 427.31 28 | 428.0 29 | 45.13 30 | 486 31 | 496 32 | 507.0 33 | 511.9 34 | 518.81 35 | 530.81 36 | 584.9 37 | 585.9 38 | 599.0 39 | 88.56 40 | 88.72 41 | 96.04 42 | 96.6 43 | 96.71 44 | 96.72 45 | 99.04 46 | 99.15 47 | 995.92 48 | V15.82 49 | V45.81 50 | V58.61 51 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | accelerate==0.2.1 2 | datasets==1.6.0 3 | jedi==0.17.1 4 | numpy==1.19.5 5 | pandas==1.1.5 6 | scikit-learn==0.24.1 7 | scipy==1.5.4 8 | tokenizers==0.10.2 9 | torch==1.8.1 10 | torchaudio==0.8.1 11 | torchvision==0.9.1 12 | tqdm==4.49.0 13 | transformers==4.5.0 14 | -------------------------------------------------------------------------------- /src/evaluation.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file contains evaluation methods that take in a set of predicted labels 3 | and a set of ground truth labels and calculate precision, recall, accuracy, f1, and metrics @k 4 | """ 5 | from collections import defaultdict 6 | import csv 7 | import json 8 | import numpy as np 9 | import os 10 | import sys 11 | 12 | from sklearn.metrics import roc_curve, auc 13 | from tqdm import tqdm 14 | 15 | import datasets 16 | 17 | MIMIC_3_DIR = "../data/mimic3/" 18 | MIMIC_2_DIR = "../data/mimic2/" 19 | 20 | 21 | def all_metrics(yhat, y, k=8, yhat_raw=None, calc_auc=True): 22 | """ 23 | Inputs: 24 | yhat: binary predictions matrix 25 | y: binary ground truth matrix 26 | k: for @k metrics 27 | yhat_raw: prediction scores matrix (floats) 28 | Outputs: 29 | dict holding relevant metrics 30 | """ 31 | names = ["acc", "prec", "rec", "f1"] 32 | 33 | #macro 34 | macro = all_macro(yhat, y) 35 | 36 | #micro 37 | ymic = y.ravel() 38 | yhatmic = yhat.ravel() 39 | micro = all_micro(yhatmic, ymic) 40 | 41 | metrics = {names[i] + "_macro": macro[i] for i in range(len(macro))} 42 | metrics.update({names[i] + "_micro": micro[i] for i in range(len(micro))}) 43 | 44 | #AUC and @k 45 | if yhat_raw is not None and calc_auc: 46 | #allow k to be passed as int or list 47 | if type(k) != list: 48 | k = [k] 49 | for k_i in k: 50 | rec_at_k = recall_at_k(yhat_raw, y, k_i) 51 | metrics['rec_at_%d' % k_i] = rec_at_k 52 | prec_at_k = precision_at_k(yhat_raw, y, k_i) 53 | metrics['prec_at_%d' % k_i] = prec_at_k 54 | metrics['f1_at_%d' % k_i] = 2*(prec_at_k*rec_at_k)/(prec_at_k+rec_at_k) 55 | 56 | roc_auc = auc_metrics(yhat_raw, y, ymic) 57 | metrics.update(roc_auc) 58 | 59 | return 
metrics 60 | 61 | def all_macro(yhat, y): 62 | return macro_accuracy(yhat, y), macro_precision(yhat, y), macro_recall(yhat, y), macro_f1(yhat, y) 63 | 64 | def all_micro(yhatmic, ymic): 65 | return micro_accuracy(yhatmic, ymic), micro_precision(yhatmic, ymic), micro_recall(yhatmic, ymic), micro_f1(yhatmic, ymic) 66 | 67 | ######################################################################### 68 | #MACRO METRICS: calculate metric for each label and average across labels 69 | ######################################################################### 70 | 71 | def macro_accuracy(yhat, y): 72 | num = intersect_size(yhat, y, 0) / (union_size(yhat, y, 0) + 1e-10) 73 | return np.mean(num) 74 | 75 | def macro_precision(yhat, y): 76 | num = intersect_size(yhat, y, 0) / (yhat.sum(axis=0) + 1e-10) 77 | return np.mean(num) 78 | 79 | def macro_recall(yhat, y): 80 | num = intersect_size(yhat, y, 0) / (y.sum(axis=0) + 1e-10) 81 | return np.mean(num) 82 | 83 | def macro_f1(yhat, y): 84 | prec = macro_precision(yhat, y) 85 | rec = macro_recall(yhat, y) 86 | if prec + rec == 0: 87 | f1 = 0. 88 | else: 89 | f1 = 2*(prec*rec)/(prec+rec) 90 | return f1 91 | 92 | ################### 93 | # INSTANCE-AVERAGED 94 | ################### 95 | 96 | def inst_precision(yhat, y): 97 | num = intersect_size(yhat, y, 1) / yhat.sum(axis=1) 98 | #correct for divide-by-zeros 99 | num[np.isnan(num)] = 0. 100 | return np.mean(num) 101 | 102 | def inst_recall(yhat, y): 103 | num = intersect_size(yhat, y, 1) / y.sum(axis=1) 104 | #correct for divide-by-zeros 105 | num[np.isnan(num)] = 0. 106 | return np.mean(num) 107 | 108 | def inst_f1(yhat, y): 109 | prec = inst_precision(yhat, y) 110 | rec = inst_recall(yhat, y) 111 | f1 = 2*(prec*rec)/(prec+rec) 112 | return f1 113 | 114 | ############## 115 | # AT-K 116 | ############## 117 | 118 | def recall_at_k(yhat_raw, y, k): 119 | #num true labels in top k predictions / num true labels 120 | sortd = np.argsort(yhat_raw)[:,::-1] 121 | topk = sortd[:,:k] 122 | 123 | #get recall at k for each example 124 | vals = [] 125 | for i, tk in enumerate(topk): 126 | num_true_in_top_k = y[i,tk].sum() 127 | denom = y[i,:].sum() 128 | vals.append(num_true_in_top_k / float(denom)) 129 | 130 | vals = np.array(vals) 131 | vals[np.isnan(vals)] = 0. 132 | 133 | return np.mean(vals) 134 | 135 | def precision_at_k(yhat_raw, y, k): 136 | #num true labels in top k predictions / k 137 | sortd = np.argsort(yhat_raw)[:,::-1] 138 | topk = sortd[:,:k] 139 | 140 | #get precision at k for each example 141 | vals = [] 142 | for i, tk in enumerate(topk): 143 | if len(tk) > 0: 144 | num_true_in_top_k = y[i,tk].sum() 145 | denom = len(tk) 146 | vals.append(num_true_in_top_k / float(denom)) 147 | 148 | return np.mean(vals) 149 | 150 | ########################################################################## 151 | #MICRO METRICS: treat every prediction as an individual binary prediction 152 | ########################################################################## 153 | 154 | def micro_accuracy(yhatmic, ymic): 155 | return intersect_size(yhatmic, ymic, 0) / union_size(yhatmic, ymic, 0) 156 | 157 | def micro_precision(yhatmic, ymic): 158 | return intersect_size(yhatmic, ymic, 0) / yhatmic.sum(axis=0) 159 | 160 | def micro_recall(yhatmic, ymic): 161 | return intersect_size(yhatmic, ymic, 0) / ymic.sum(axis=0) 162 | 163 | def micro_f1(yhatmic, ymic): 164 | prec = micro_precision(yhatmic, ymic) 165 | rec = micro_recall(yhatmic, ymic) 166 | if prec + rec == 0: 167 | f1 = 0. 
168 | else: 169 | f1 = 2*(prec*rec)/(prec+rec) 170 | return f1 171 | 172 | def auc_metrics(yhat_raw, y, ymic): 173 | if yhat_raw.shape[0] <= 1: 174 | return 175 | fpr = {} 176 | tpr = {} 177 | roc_auc = {} 178 | #get AUC for each label individually 179 | relevant_labels = [] 180 | auc_labels = {} 181 | for i in range(y.shape[1]): 182 | #only if there are true positives for this label 183 | if y[:,i].sum() > 0: 184 | fpr[i], tpr[i], _ = roc_curve(y[:,i], yhat_raw[:,i]) 185 | if len(fpr[i]) > 1 and len(tpr[i]) > 1: 186 | auc_score = auc(fpr[i], tpr[i]) 187 | if not np.isnan(auc_score): 188 | auc_labels["auc_%d" % i] = auc_score 189 | relevant_labels.append(i) 190 | 191 | #macro-AUC: just average the auc scores 192 | aucs = [] 193 | for i in relevant_labels: 194 | aucs.append(auc_labels['auc_%d' % i]) 195 | roc_auc['auc_macro'] = np.mean(aucs) 196 | 197 | #micro-AUC: just look at each individual prediction 198 | yhatmic = yhat_raw.ravel() 199 | fpr["micro"], tpr["micro"], _ = roc_curve(ymic, yhatmic) 200 | roc_auc["auc_micro"] = auc(fpr["micro"], tpr["micro"]) 201 | 202 | return roc_auc 203 | 204 | ######################## 205 | # METRICS BY CODE TYPE 206 | ######################## 207 | 208 | def results_by_type(Y, mdir, version='mimic3'): 209 | d2ind = {} 210 | p2ind = {} 211 | 212 | #get predictions for diagnoses and procedures 213 | diag_preds = defaultdict(lambda: set([])) 214 | proc_preds = defaultdict(lambda: set([])) 215 | preds = defaultdict(lambda: set()) 216 | with open('%s/preds_test.psv' % mdir, 'r') as f: 217 | r = csv.reader(f, delimiter='|') 218 | for row in r: 219 | if len(row) > 1: 220 | for code in row[1:]: 221 | preds[row[0]].add(code) 222 | if code != '': 223 | try: 224 | pos = code.index('.') 225 | if pos == 3 or (code[0] == 'E' and pos == 4): 226 | if code not in d2ind: 227 | d2ind[code] = len(d2ind) 228 | diag_preds[row[0]].add(code) 229 | elif pos == 2: 230 | if code not in p2ind: 231 | p2ind[code] = len(p2ind) 232 | proc_preds[row[0]].add(code) 233 | except: 234 | if len(code) == 3 or (code[0] == 'E' and len(code) == 4): 235 | if code not in d2ind: 236 | d2ind[code] = len(d2ind) 237 | diag_preds[row[0]].add(code) 238 | #get ground truth for diagnoses and procedures 239 | diag_golds = defaultdict(lambda: set([])) 240 | proc_golds = defaultdict(lambda: set([])) 241 | golds = defaultdict(lambda: set()) 242 | test_file = '%s/test_%s.csv' % (MIMIC_3_DIR, str(Y)) if version == 'mimic3' else '%s/test.csv' % MIMIC_2_DIR 243 | with open(test_file, 'r') as f: 244 | r = csv.reader(f) 245 | #header 246 | next(r) 247 | for row in r: 248 | codes = set([c for c in row[3].split(';')]) 249 | for code in codes: 250 | golds[row[1]].add(code) 251 | try: 252 | pos = code.index('.') 253 | if pos == 3: 254 | if code not in d2ind: 255 | d2ind[code] = len(d2ind) 256 | diag_golds[row[1]].add(code) 257 | elif pos == 2: 258 | if code not in p2ind: 259 | p2ind[code] = len(p2ind) 260 | proc_golds[row[1]].add(code) 261 | except: 262 | if len(code) == 3 or (code[0] == 'E' and len(code) == 4): 263 | if code not in d2ind: 264 | d2ind[code] = len(d2ind) 265 | diag_golds[row[1]].add(code) 266 | 267 | hadm_ids = sorted(set(diag_golds.keys()).intersection(set(diag_preds.keys()))) 268 | 269 | ind2d = {i:d for d,i in d2ind.items()} 270 | ind2p = {i:p for p,i in p2ind.items()} 271 | type_dicts = (ind2d, ind2p) 272 | return diag_preds, diag_golds, proc_preds, proc_golds, golds, preds, hadm_ids, type_dicts 273 | 274 | 275 | def diag_f1(diag_preds, diag_golds, ind2d, hadm_ids): 276 | num_labels = len(ind2d) 
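    # Build one binary indicator row per admission: yhat_diag[i, j] is 1 when the
    # j-th diagnosis code (ind2d[j]) was predicted for hadm_ids[i], and y_diag[i, j]
    # is 1 when that code appears in the gold annotations; the flattened matrices
    # are then scored with micro-F1.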
277 | yhat_diag = np.zeros((len(hadm_ids), num_labels)) 278 | y_diag = np.zeros((len(hadm_ids), num_labels)) 279 | for i,hadm_id in tqdm(enumerate(hadm_ids)): 280 | yhat_diag_inds = [1 if ind2d[j] in diag_preds[hadm_id] else 0 for j in range(num_labels)] 281 | gold_diag_inds = [1 if ind2d[j] in diag_golds[hadm_id] else 0 for j in range(num_labels)] 282 | yhat_diag[i] = yhat_diag_inds 283 | y_diag[i] = gold_diag_inds 284 | return micro_f1(yhat_diag.ravel(), y_diag.ravel()) 285 | 286 | def proc_f1(proc_preds, proc_golds, ind2p, hadm_ids): 287 | num_labels = len(ind2p) 288 | yhat_proc = np.zeros((len(hadm_ids), num_labels)) 289 | y_proc = np.zeros((len(hadm_ids), num_labels)) 290 | for i,hadm_id in tqdm(enumerate(hadm_ids)): 291 | yhat_proc_inds = [1 if ind2p[j] in proc_preds[hadm_id] else 0 for j in range(num_labels)] 292 | gold_proc_inds = [1 if ind2p[j] in proc_golds[hadm_id] else 0 for j in range(num_labels)] 293 | yhat_proc[i] = yhat_proc_inds 294 | y_proc[i] = gold_proc_inds 295 | return micro_f1(yhat_proc.ravel(), y_proc.ravel()) 296 | 297 | def metrics_from_dicts(preds, golds, mdir, ind2c): 298 | with open('%s/pred_100_scores_test.json' % mdir, 'r') as f: 299 | scors = json.load(f) 300 | 301 | hadm_ids = sorted(set(golds.keys()).intersection(set(preds.keys()))) 302 | num_labels = len(ind2c) 303 | yhat = np.zeros((len(hadm_ids), num_labels)) 304 | yhat_raw = np.zeros((len(hadm_ids), num_labels)) 305 | y = np.zeros((len(hadm_ids), num_labels)) 306 | for i,hadm_id in tqdm(enumerate(hadm_ids)): 307 | yhat_inds = [1 if ind2c[j] in preds[hadm_id] else 0 for j in range(num_labels)] 308 | yhat_raw_inds = [scors[hadm_id][ind2c[j]] if ind2c[j] in scors[hadm_id] else 0 for j in range(num_labels)] 309 | gold_inds = [1 if ind2c[j] in golds[hadm_id] else 0 for j in range(num_labels)] 310 | yhat[i] = yhat_inds 311 | yhat_raw[i] = yhat_raw_inds 312 | y[i] = gold_inds 313 | return yhat, yhat_raw, y, all_metrics(yhat, y, yhat_raw=yhat_raw, calc_auc=False) 314 | 315 | 316 | def union_size(yhat, y, axis): 317 | #axis=0 for label-level union (macro). axis=1 for instance-level 318 | return np.logical_or(yhat, y).sum(axis=axis).astype(float) 319 | 320 | def intersect_size(yhat, y, axis): 321 | #axis=0 for label-level union (macro). 
axis=1 for instance-level 322 | return np.logical_and(yhat, y).sum(axis=axis).astype(float) 323 | 324 | def print_metrics(metrics): 325 | print() 326 | if "auc_macro" in metrics.keys(): 327 | print("[MACRO] accuracy, precision, recall, f-measure, AUC") 328 | print("%.4f, %.4f, %.4f, %.4f, %.4f" % (metrics["acc_macro"], metrics["prec_macro"], metrics["rec_macro"], metrics["f1_macro"], metrics["auc_macro"])) 329 | else: 330 | print("[MACRO] accuracy, precision, recall, f-measure") 331 | print("%.4f, %.4f, %.4f, %.4f" % (metrics["acc_macro"], metrics["prec_macro"], metrics["rec_macro"], metrics["f1_macro"])) 332 | 333 | if "auc_micro" in metrics.keys(): 334 | print("[MICRO] accuracy, precision, recall, f-measure, AUC") 335 | print("%.4f, %.4f, %.4f, %.4f, %.4f" % (metrics["acc_micro"], metrics["prec_micro"], metrics["rec_micro"], metrics["f1_micro"], metrics["auc_micro"])) 336 | else: 337 | print("[MICRO] accuracy, precision, recall, f-measure") 338 | print("%.4f, %.4f, %.4f, %.4f" % (metrics["acc_micro"], metrics["prec_micro"], metrics["rec_micro"], metrics["f1_micro"])) 339 | for metric, val in metrics.items(): 340 | if metric.find("rec_at") != -1: 341 | print("%s: %.4f" % (metric, val)) 342 | print() 343 | 344 | if __name__ == "__main__": 345 | if len(sys.argv) < 5: 346 | print("usage: python " + str(os.path.basename(__file__) + " [train_dataset] [|Y| (as string)] [version (mimic2 or mimic3)] [model_dir]")) 347 | sys.exit(0) 348 | train_path, Y, version, mdir = sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4] 349 | ind2c, _ = datasets.load_full_codes(train_path, version=version) 350 | 351 | diag_preds, diag_golds, proc_preds, proc_golds, golds, preds, hadm_ids, type_dicts = results_by_type(Y, mdir, version) 352 | yhat, yhat_raw, y, metrics = metrics_from_dicts(preds, golds, mdir, ind2c) 353 | print_metrics(metrics) 354 | 355 | k = [5] if Y == '50' else [8,15] 356 | prec_at_8 = precision_at_k(yhat_raw, y, k=8) 357 | print("PRECISION@8: %.4f" % prec_at_8) 358 | prec_at_15 = precision_at_k(yhat_raw, y, k=15) 359 | print("PRECISION@15: %.4f" % prec_at_15) 360 | 361 | f1_diag = diag_f1(diag_preds, diag_golds, type_dicts[0], hadm_ids) 362 | f1_proc = proc_f1(proc_preds, proc_golds, type_dicts[1], hadm_ids) 363 | print("[BY CODE TYPE] f1-diag f1-proc") 364 | print("%.4f %.4f" % (f1_diag, f1_proc)) 365 | -------------------------------------------------------------------------------- /src/modeling_bert.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. 3 | # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | """PyTorch BERT model. 
""" 17 | 18 | 19 | import math 20 | import os 21 | import warnings 22 | import logging 23 | from dataclasses import dataclass 24 | from typing import Optional, Tuple 25 | 26 | import torch 27 | import torch.utils.checkpoint 28 | from torch import nn 29 | from torch.nn import BCEWithLogitsLoss 30 | 31 | from transformers import BertPreTrainedModel, BertModel 32 | from transformers.modeling_outputs import SequenceClassifierOutput 33 | 34 | 35 | class BertForMultilabelClassification(BertPreTrainedModel): 36 | def __init__(self, config): 37 | super().__init__(config) 38 | self.num_labels = config.num_labels 39 | self.model_mode = config.model_mode 40 | 41 | self.bert = BertModel(config) 42 | self.dropout = nn.Dropout(config.hidden_dropout_prob) 43 | if "cls" in self.model_mode: 44 | self.classifier = nn.Linear(config.hidden_size, config.num_labels) 45 | elif "laat" in self.model_mode: 46 | self.first_linear = nn.Linear(config.hidden_size, config.hidden_size, bias=False) 47 | self.second_linear = nn.Linear(config.hidden_size, config.num_labels, bias=False) 48 | self.third_linear = nn.Linear(config.hidden_size, config.num_labels) 49 | else: 50 | raise ValueError(f"model_mode {self.model_mode} not recognized") 51 | 52 | self.init_weights() 53 | 54 | def forward( 55 | self, 56 | input_ids=None, 57 | attention_mask=None, 58 | token_type_ids=None, 59 | position_ids=None, 60 | head_mask=None, 61 | inputs_embeds=None, 62 | labels=None, 63 | output_attentions=None, 64 | output_hidden_states=None, 65 | return_dict=None, 66 | ): 67 | r""" 68 | input_ids (torch.LongTensor of shape (batch_size, num_chunks, chunk_size)) 69 | labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, num_labels)`, `optional`): 70 | """ 71 | return_dict = return_dict if return_dict is not None else self.config.use_return_dict 72 | 73 | batch_size, num_chunks, chunk_size = input_ids.size() 74 | outputs = self.bert( 75 | input_ids.view(-1, chunk_size), 76 | attention_mask=attention_mask.view(-1, chunk_size), 77 | token_type_ids=token_type_ids.view(-1, chunk_size), 78 | position_ids=position_ids, 79 | head_mask=head_mask, 80 | inputs_embeds=inputs_embeds, 81 | output_attentions=output_attentions, 82 | output_hidden_states=output_hidden_states, 83 | return_dict=return_dict, 84 | ) 85 | 86 | if "cls" in self.model_mode: 87 | pooled_output = outputs[1].view(batch_size, num_chunks, -1) 88 | if self.model_mode == "cls-sum": 89 | pooled_output = pooled_output.sum(dim=1) 90 | elif self.model_mode == "cls-max": 91 | pooled_output = pooled_output.max(dim=1).values 92 | else: 93 | raise ValueError(f"model_mode {self.model_mode} not recognized") 94 | pooled_output = self.dropout(pooled_output) 95 | logits = self.classifier(pooled_output) 96 | elif "laat" in self.model_mode: 97 | if self.model_mode == "laat": 98 | hidden_output = outputs[0].view(batch_size, num_chunks*chunk_size, -1) 99 | elif self.model_mode == "laat-split": 100 | hidden_output = outputs[0].view(batch_size*num_chunks, chunk_size, -1) 101 | weights = torch.tanh(self.first_linear(hidden_output)) 102 | att_weights = self.second_linear(weights) 103 | att_weights = torch.nn.functional.softmax(att_weights, dim=1).transpose(1, 2) 104 | weighted_output = att_weights @ hidden_output 105 | logits = self.third_linear.weight.mul(weighted_output).sum(dim=2).add(self.third_linear.bias) 106 | if self.model_mode == "laat-split": 107 | logits = logits.view(batch_size, num_chunks, -1).max(dim=1).values 108 | else: 109 | raise ValueError(f"model_mode {self.model_mode} not recognized") 110 | 
111 | loss = None 112 | if labels is not None: 113 | loss_fct = BCEWithLogitsLoss() 114 | loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1, self.num_labels)) 115 | 116 | if not return_dict: 117 | output = (logits,) + outputs[2:] 118 | return ((loss,) + output) if loss is not None else output 119 | 120 | return SequenceClassifierOutput( 121 | loss=loss, 122 | logits=logits, 123 | hidden_states=outputs.hidden_states, 124 | attentions=outputs.attentions, 125 | ) 126 | -------------------------------------------------------------------------------- /src/modeling_longformer.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Copyright 2020 The Allen Institute for AI team and The HuggingFace Inc. team. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | """PyTorch Longformer model. """ 16 | 17 | import math 18 | from dataclasses import dataclass 19 | from typing import Optional, Tuple 20 | 21 | import torch 22 | import torch.nn as nn 23 | import torch.utils.checkpoint 24 | from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss 25 | from torch.nn import functional as F 26 | 27 | from transformers import LongformerModel 28 | from transformers.models.longformer.modeling_longformer import LongformerSequenceClassifierOutput, LongformerPreTrainedModel 29 | 30 | 31 | class LongformerForMultilabelClassification(LongformerPreTrainedModel): 32 | 33 | _keys_to_ignore_on_load_unexpected = [r"pooler"] 34 | 35 | def __init__(self, config): 36 | super().__init__(config) 37 | self.num_labels = config.num_labels 38 | self.config = config 39 | 40 | self.longformer = LongformerModel(config, add_pooling_layer=False) 41 | self.first_linear = nn.Linear(config.hidden_size, config.hidden_size, bias=False) 42 | self.second_linear = nn.Linear(config.hidden_size, config.num_labels, bias=False) 43 | self.third_linear = nn.Linear(config.hidden_size, config.num_labels) 44 | 45 | self.init_weights() 46 | 47 | def forward( 48 | self, 49 | input_ids=None, 50 | attention_mask=None, 51 | global_attention_mask=None, 52 | head_mask=None, 53 | token_type_ids=None, 54 | position_ids=None, 55 | inputs_embeds=None, 56 | labels=None, 57 | output_attentions=None, 58 | output_hidden_states=None, 59 | return_dict=None, 60 | ): 61 | r""" 62 | labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`): 63 | Labels for computing the sequence classification/regression loss. Indices should be in :obj:`[0, ..., 64 | config.num_labels - 1]`. If :obj:`config.num_labels == 1` a regression loss is computed (Mean-Square loss), 65 | If :obj:`config.num_labels > 1` a classification loss is computed (Cross-Entropy). 
66 | """ 67 | return_dict = return_dict if return_dict is not None else self.config.use_return_dict 68 | 69 | batch_size = input_ids.size(0) 70 | input_ids = input_ids.view(batch_size, -1) 71 | if attention_mask is not None: 72 | attention_mask = attention_mask.view(batch_size, -1) 73 | if token_type_ids is not None: 74 | token_type_ids = token_type_ids.view(batch_size, -1) 75 | 76 | if global_attention_mask is None: 77 | global_attention_mask = torch.zeros_like(input_ids) 78 | # global attention on cls token 79 | global_attention_mask[:, 0] = 1 80 | 81 | outputs = self.longformer( 82 | input_ids, 83 | attention_mask=attention_mask, 84 | global_attention_mask=global_attention_mask, 85 | head_mask=head_mask, 86 | token_type_ids=token_type_ids, 87 | position_ids=position_ids, 88 | inputs_embeds=inputs_embeds, 89 | output_attentions=output_attentions, 90 | output_hidden_states=output_hidden_states, 91 | return_dict=return_dict, 92 | ) 93 | hidden_output = outputs[0] 94 | weights = torch.tanh(self.first_linear(hidden_output)) 95 | att_weights = self.second_linear(weights) 96 | att_weights = torch.nn.functional.softmax(att_weights, dim=1).transpose(1, 2) 97 | weighted_output = att_weights @ hidden_output 98 | logits = self.third_linear.weight.mul(weighted_output).sum(dim=2).add(self.third_linear.bias) 99 | 100 | loss = None 101 | if labels is not None: 102 | loss_fct = BCEWithLogitsLoss() 103 | loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1, self.num_labels)) 104 | 105 | if not return_dict: 106 | output = (logits,) + outputs[2:] 107 | return ((loss,) + output) if loss is not None else output 108 | 109 | return LongformerSequenceClassifierOutput( 110 | loss=loss, 111 | logits=logits, 112 | hidden_states=outputs.hidden_states, 113 | attentions=outputs.attentions, 114 | global_attentions=outputs.global_attentions, 115 | ) 116 | -------------------------------------------------------------------------------- /src/modeling_roberta.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. 3 | # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | """PyTorch RoBERTa model. 
""" 17 | 18 | 19 | import math 20 | import os 21 | import warnings 22 | import logging 23 | from dataclasses import dataclass 24 | from typing import Optional, Tuple 25 | 26 | import torch 27 | import torch.utils.checkpoint 28 | from torch import nn 29 | from torch.nn import BCEWithLogitsLoss 30 | 31 | from transformers import RobertaModel 32 | from transformers.modeling_outputs import SequenceClassifierOutput 33 | from transformers.models.roberta.modeling_roberta import RobertaPreTrainedModel 34 | 35 | 36 | class RobertaForMultilabelClassification(RobertaPreTrainedModel): 37 | def __init__(self, config): 38 | super().__init__(config) 39 | self.num_labels = config.num_labels 40 | self.model_mode = config.model_mode 41 | 42 | self.roberta = RobertaModel(config, add_pooling_layer=False) 43 | self.dropout = nn.Dropout(config.hidden_dropout_prob) 44 | if "cls" in self.model_mode: 45 | self.classifier = nn.Linear(config.hidden_size, config.num_labels) 46 | elif "laat" in self.model_mode: 47 | self.first_linear = nn.Linear(config.hidden_size, config.hidden_size, bias=False) 48 | self.second_linear = nn.Linear(config.hidden_size, config.num_labels, bias=False) 49 | self.third_linear = nn.Linear(config.hidden_size, config.num_labels) 50 | else: 51 | raise ValueError(f"model_mode {self.model_mode} not recognized") 52 | 53 | self.init_weights() 54 | 55 | def forward( 56 | self, 57 | input_ids=None, 58 | attention_mask=None, 59 | token_type_ids=None, 60 | position_ids=None, 61 | head_mask=None, 62 | inputs_embeds=None, 63 | labels=None, 64 | output_attentions=None, 65 | output_hidden_states=None, 66 | return_dict=None, 67 | ): 68 | r""" 69 | input_ids (torch.LongTensor of shape (batch_size, num_chunks, chunk_size)) 70 | labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, num_labels)`, `optional`): 71 | """ 72 | return_dict = return_dict if return_dict is not None else self.config.use_return_dict 73 | 74 | batch_size, num_chunks, chunk_size = input_ids.size() 75 | outputs = self.roberta( 76 | input_ids.view(-1, chunk_size), 77 | attention_mask=attention_mask.view(-1, chunk_size) if attention_mask is not None else None, 78 | token_type_ids=token_type_ids.view(-1, chunk_size) if token_type_ids is not None else None, 79 | position_ids=position_ids, 80 | head_mask=head_mask, 81 | inputs_embeds=inputs_embeds, 82 | output_attentions=output_attentions, 83 | output_hidden_states=output_hidden_states, 84 | return_dict=return_dict, 85 | ) 86 | 87 | if "cls" in self.model_mode: 88 | pooled_output = outputs[1].view(batch_size, num_chunks, -1) 89 | if self.model_mode == "cls-sum": 90 | pooled_output = pooled_output.sum(dim=1) 91 | elif self.model_mode == "cls-max": 92 | pooled_output = pooled_output.max(dim=1).values 93 | else: 94 | raise ValueError(f"model_mode {self.model_mode} not recognized") 95 | pooled_output = self.dropout(pooled_output) 96 | logits = self.classifier(pooled_output) 97 | elif "laat" in self.model_mode: 98 | if self.model_mode == "laat": 99 | hidden_output = outputs[0].view(batch_size, num_chunks*chunk_size, -1) 100 | elif self.model_mode == "laat-split": 101 | hidden_output = outputs[0].view(batch_size*num_chunks, chunk_size, -1) 102 | weights = torch.tanh(self.first_linear(hidden_output)) 103 | att_weights = self.second_linear(weights) 104 | # att_weights.masked_fill_((attention_mask.view(batch_size, -1, 1)==0), -math.inf) 105 | att_weights = torch.nn.functional.softmax(att_weights, dim=1).transpose(1, 2) 106 | weighted_output = att_weights @ hidden_output 107 | logits = 
self.third_linear.weight.mul(weighted_output).sum(dim=2).add(self.third_linear.bias) 108 | if self.model_mode == "laat-split": 109 | logits = logits.view(batch_size, num_chunks, -1).max(dim=1).values 110 | else: 111 | raise ValueError(f"model_mode {self.model_mode} not recognized") 112 | 113 | loss = None 114 | if labels is not None: 115 | loss_fct = BCEWithLogitsLoss() 116 | loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1, self.num_labels)) 117 | 118 | if not return_dict: 119 | output = (logits,) + outputs[2:] 120 | return ((loss,) + output) if loss is not None else output 121 | 122 | return SequenceClassifierOutput( 123 | loss=loss, 124 | logits=logits, 125 | hidden_states=outputs.hidden_states, 126 | attentions=outputs.attentions, 127 | ) 128 | -------------------------------------------------------------------------------- /src/run_icd.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Copyright 2021 The HuggingFace Inc. team. All rights reserved. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | """ Finetuning a 🤗 Transformers model for sequence classification on GLUE.""" 16 | import argparse 17 | import logging 18 | import math 19 | import os 20 | import random 21 | 22 | import datasets 23 | from datasets import load_dataset, load_metric 24 | from torch.utils.data.dataloader import DataLoader 25 | from tqdm.auto import tqdm 26 | 27 | import transformers 28 | import torch 29 | import numpy as np 30 | from accelerate import Accelerator, DistributedDataParallelKwargs 31 | from transformers import ( 32 | AdamW, 33 | AutoConfig, 34 | AutoModelForSequenceClassification, 35 | AutoTokenizer, 36 | SchedulerType, 37 | get_scheduler, 38 | set_seed, 39 | ) 40 | from modeling_bert import BertForMultilabelClassification 41 | from modeling_roberta import RobertaForMultilabelClassification 42 | from modeling_longformer import LongformerForMultilabelClassification 43 | from evaluation import all_metrics 44 | 45 | 46 | logger = logging.getLogger(__name__) 47 | 48 | 49 | MODELS_CLASSES = { 50 | 'bert': BertForMultilabelClassification, 51 | 'roberta': RobertaForMultilabelClassification, 52 | 'longformer': LongformerForMultilabelClassification 53 | } 54 | 55 | 56 | def parse_args(): 57 | parser = argparse.ArgumentParser(description="Finetune a transformers model on a text classification task") 58 | parser.add_argument( 59 | "--task_name", 60 | type=str, 61 | default=None, 62 | help="The name of the glue task to train on.", 63 | ) 64 | parser.add_argument( 65 | "--train_file", type=str, default=None, help="A csv or a json file containing the training data." 66 | ) 67 | parser.add_argument( 68 | "--validation_file", type=str, default=None, help="A csv or a json file containing the validation data." 69 | ) 70 | parser.add_argument( 71 | "--code_file", type=str, default=None, help="A txt file containing all codes." 
72 | ) 73 | parser.add_argument( 74 | "--max_length", 75 | type=int, 76 | default=128, 77 | help=( 78 | "The maximum total input sequence length after tokenization. Sequences longer than this will be truncated," 79 | " sequences shorter will be padded if `--pad_to_max_length` is passed." 80 | ), 81 | ) 82 | parser.add_argument( 83 | "--chunk_size", 84 | type=int, 85 | default=256, 86 | help=( 87 | "The size of the chunks that the inputs will be split into." 88 | ), 89 | ) 90 | parser.add_argument( 91 | "--pad_to_max_length", 92 | action="store_true", 93 | help="If passed, pad all samples to `max_length`. Otherwise, dynamic padding is used.", 94 | ) 95 | parser.add_argument( 96 | "--model_name_or_path", 97 | type=str, 98 | help="Path to pretrained model or model identifier from huggingface.co/models.", 99 | required=True, 100 | ) 101 | parser.add_argument( 102 | "--model_type", 103 | type=str, 104 | help="The type of model to use.", 105 | required=True, 106 | choices=["bert", "roberta", "longformer"] 107 | ) 108 | parser.add_argument( 109 | "--model_mode", 110 | type=str, 111 | help="How to aggregate the encoder outputs into document-level predictions.", 112 | required=True, 113 | choices=["cls-sum", "cls-max", "laat", "laat-split"] 114 | ) 115 | parser.add_argument( 116 | "--use_slow_tokenizer", 117 | action="store_true", 118 | help="If passed, will use a slow tokenizer (not backed by the 🤗 Tokenizers library).", 119 | ) 120 | parser.add_argument( 121 | "--cased", 122 | action="store_true", 123 | help="If passed, keep the original casing (equivalent to do_lower_case=False).", 124 | ) 125 | parser.add_argument( 126 | "--per_device_train_batch_size", 127 | type=int, 128 | default=8, 129 | help="Batch size (per device) for the training dataloader.", 130 | ) 131 | parser.add_argument( 132 | "--per_device_eval_batch_size", 133 | type=int, 134 | default=8, 135 | help="Batch size (per device) for the evaluation dataloader.", 136 | ) 137 | parser.add_argument( 138 | "--learning_rate", 139 | type=float, 140 | default=5e-5, 141 | help="Initial learning rate (after the potential warmup period) to use.", 142 | ) 143 | parser.add_argument("--weight_decay", type=float, default=0.0, help="Weight decay to use.") 144 | parser.add_argument("--num_train_epochs", type=int, default=3, help="Total number of training epochs to perform.") 145 | parser.add_argument( 146 | "--max_train_steps", 147 | type=int, 148 | default=None, 149 | help="Total number of training steps to perform. If provided, overrides num_train_epochs.", 150 | ) 151 | parser.add_argument( 152 | "--gradient_accumulation_steps", 153 | type=int, 154 | default=1, 155 | help="Number of update steps to accumulate before performing a backward/update pass.", 156 | ) 157 | parser.add_argument( 158 | "--lr_scheduler_type", 159 | type=str, 160 | default="linear", 161 | help="The scheduler type to use.", 162 | choices=["linear", "cosine", "cosine_with_restarts", "polynomial", "constant", "constant_with_warmup"], 163 | ) 164 | parser.add_argument( 165 | "--num_warmup_steps", type=int, default=0, help="Number of steps for the warmup in the lr scheduler."
166 | ) 167 | parser.add_argument( 168 | "--code_50", action='store_true', help="If passed, use only the top-50 codes." 169 | ) 170 | parser.add_argument("--output_dir", type=str, default=None, help="Where to store the final model.") 171 | parser.add_argument("--seed", type=int, default=None, help="A seed for reproducible training.") 172 | args = parser.parse_args() 173 | 174 | # Sanity checks 175 | if args.task_name is None and args.train_file is None and args.validation_file is None: 176 | raise ValueError("Need either a task name or a training/validation file.") 177 | else: 178 | if args.train_file is not None: 179 | extension = args.train_file.split(".")[-1] 180 | assert extension in ["csv", "json"], "`train_file` should be a csv or a json file." 181 | if args.validation_file is not None: 182 | extension = args.validation_file.split(".")[-1] 183 | assert extension in ["csv", "json"], "`validation_file` should be a csv or a json file." 184 | 185 | if args.output_dir is not None: 186 | os.makedirs(args.output_dir, exist_ok=True) 187 | 188 | return args 189 | 190 | 191 | def main(): 192 | args = parse_args() 193 | 194 | # Initialize the accelerator. We will let the accelerator handle device placement for us in this example. 195 | ddp_kwargs = DistributedDataParallelKwargs(find_unused_parameters=True) 196 | accelerator = Accelerator(kwargs_handlers=[ddp_kwargs]) 197 | # Make one log on every process with the configuration for debugging. 198 | logging.basicConfig( 199 | format="%(asctime)s - %(levelname)s - %(name)s - %(message)s", 200 | datefmt="%m/%d/%Y %H:%M:%S", 201 | level=logging.INFO, 202 | ) 203 | logger.info(accelerator.state) 204 | 205 | # Set up logging: we only want one process per machine to log things on the screen. 206 | # accelerator.is_local_main_process is only True for one process per machine. 207 | logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR) 208 | if accelerator.is_local_main_process: 209 | datasets.utils.logging.set_verbosity_warning() 210 | transformers.utils.logging.set_verbosity_info() 211 | else: 212 | datasets.utils.logging.set_verbosity_error() 213 | transformers.utils.logging.set_verbosity_error() 214 | 215 | # If passed along, set the training seed now. 216 | if args.seed is not None: 217 | set_seed(args.seed) 218 | 219 | # Get the datasets: provide your own CSV/JSON training and evaluation files (see below); unlike the 220 | # GLUE example this script was adapted from, the dataset is always loaded from these local files. 221 | 222 | # For the CSV/JSON files, this script uses the column called 'text' as the input document and the column 223 | # called 'label' as the target; 'label' holds the ICD codes assigned to the document, separated by 224 | # semicolons (see preprocess_function below). 225 | 226 | # The label space itself is read from a code file (see --code_file and --code_50), so codes that never 227 | # appear in the training split still receive a dimension in the multi-label output. 228 | 229 | # In distributed training, the load_dataset function guarantees that only one local process can concurrently 230 | # download the dataset. 231 | 232 | # Loading the dataset from local csv or json file.
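# For example, a purely illustrative two-row file (the real notes come from the MIMIC preprocessing
# pipeline, which is not part of this script, and the codes shown here are placeholders):
#
#     text,label
#     "admission note for patient ...","401.9;428.0;250.00"
#     "discharge summary ...","V58.61"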
233 | data_files = {} 234 | if args.train_file is not None: 235 | data_files["train"] = args.train_file 236 | if args.validation_file is not None: 237 | data_files["validation"] = args.validation_file 238 | extension = (args.train_file if args.train_file is not None else args.validation_file).split(".")[-1] 239 | raw_datasets = load_dataset(extension, data_files=data_files) 240 | # See more about loading any type of standard or custom dataset at 241 | # https://huggingface.co/docs/datasets/loading_datasets.html. 242 | 243 | # Labels 244 | # A useful fast method: 245 | # https://huggingface.co/docs/datasets/package_reference/main_classes.html#datasets.Dataset.unique 246 | labels = set() 247 | all_codes_file = "../data/mimic3/ALL_CODES.txt" if not args.code_50 else "../data/mimic3/ALL_CODES_50.txt" 248 | if args.code_file is not None: 249 | all_codes_file = args.code_file 250 | 251 | with open(all_codes_file, "r") as f: 252 | for line in f: 253 | if line.strip() != "": 254 | labels.add(line.strip()) 255 | label_list = sorted(list(labels)) 256 | num_labels = len(label_list) 257 | 258 | # Load pretrained model and tokenizer 259 | # 260 | # In distributed training, the .from_pretrained methods guarantee that only one local process can concurrently 261 | # download model & vocab. 262 | config = AutoConfig.from_pretrained(args.model_name_or_path, num_labels=num_labels, finetuning_task=args.task_name) 263 | if args.model_type == "longformer": 264 | config.attention_window = args.chunk_size 265 | elif args.model_type in ["bert", "roberta"]: 266 | config.model_mode = args.model_mode 267 | tokenizer = AutoTokenizer.from_pretrained( 268 | args.model_name_or_path, 269 | use_fast=not args.use_slow_tokenizer, 270 | do_lower_case=not args.cased) 271 | model_class = MODELS_CLASSES[args.model_type] 272 | if args.num_train_epochs > 0: 273 | model = model_class.from_pretrained( 274 | args.model_name_or_path, 275 | from_tf=bool(".ckpt" in args.model_name_or_path), 276 | config=config, 277 | ) 278 | else: 279 | model = model_class.from_pretrained( 280 | args.output_dir, 281 | config=config, 282 | ) 283 | 284 | sentence1_key, sentence2_key = "text", None 285 | 286 | label_to_id = {v: i for i, v in enumerate(label_list)} 287 | 288 | padding = False 289 | 290 | def preprocess_function(examples): 291 | # Tokenize the texts 292 | texts = ( 293 | (examples[sentence1_key],) if sentence2_key is None else (examples[sentence1_key], examples[sentence2_key]) 294 | ) 295 | result = tokenizer(*texts, padding=padding, max_length=args.max_length, truncation=True, add_special_tokens="cls" not in args.model_mode) 296 | if "label" in examples: 297 | result["labels"] = examples["label"] 298 | result["label_ids"] = [[label_to_id[label.strip()] for label in labels.strip().split(';') if label.strip() != ""] if labels is not None else [] for labels in examples["label"]] 299 | return result 300 | 301 | remove_columns = raw_datasets["train"].column_names if args.train_file is not None else raw_datasets["validation"].column_names 302 | processed_datasets = raw_datasets.map( 303 | preprocess_function, batched=True, remove_columns=remove_columns 304 | ) 305 | 306 | eval_dataset = processed_datasets["validation"] 307 | 308 | if args.num_train_epochs > 0: 309 | train_dataset = processed_datasets["train"] 310 | # Log a few random samples from the training set: 311 | for index in random.sample(range(len(train_dataset)), 3): 312 | logger.info(f"Sample {index} of the training set: {train_dataset[index]}.") 313 | logger.info(f"Original tokens: 
{tokenizer.decode(train_dataset[index]['input_ids'])}") 314 | 315 | def data_collator(features): 316 | batch = dict() 317 | 318 | if "cls" in args.model_mode: 319 | for f in features: 320 | new_input_ids = [] 321 | for i in range(0, len(f["input_ids"]), args.chunk_size - 2): 322 | new_input_ids.extend([tokenizer.cls_token_id] + f["input_ids"][i:i+(args.chunk_size)-2] + [tokenizer.sep_token_id]) 323 | f["input_ids"] = new_input_ids 324 | f["attention_mask"] = [1] * len(f["input_ids"]) 325 | f["token_type_ids"] = [0] * len(f["input_ids"]) 326 | 327 | max_length = max([len(f["input_ids"]) for f in features]) 328 | if max_length % args.chunk_size != 0: 329 | max_length = max_length - (max_length % args.chunk_size) + args.chunk_size 330 | 331 | batch["input_ids"] = torch.tensor([ 332 | f["input_ids"] + [tokenizer.pad_token_id] * (max_length - len(f["input_ids"])) 333 | for f in features 334 | ]).contiguous().view((len(features), -1, args.chunk_size)) 335 | if "attention_mask" in features[0]: 336 | batch["attention_mask"] = torch.tensor([ 337 | f["attention_mask"] + [0] * (max_length - len(f["attention_mask"])) 338 | for f in features 339 | ]).contiguous().view((len(features), -1, args.chunk_size)) 340 | if "token_type_ids" in features[0]: 341 | batch["token_type_ids"] = torch.tensor([ 342 | f["token_type_ids"] + [0] * (max_length - len(f["token_type_ids"])) 343 | for f in features 344 | ]).contiguous().view((len(features), -1, args.chunk_size)) 345 | label_ids = torch.zeros((len(features), len(label_list))) 346 | for i, f in enumerate(features): 347 | for label in f["label_ids"]: 348 | label_ids[i, label] = 1 349 | batch["labels"] = label_ids 350 | return batch 351 | 352 | if args.num_train_epochs > 0: 353 | train_dataloader = DataLoader( 354 | train_dataset, shuffle=True, collate_fn=data_collator, batch_size=args.per_device_train_batch_size 355 | ) 356 | eval_dataloader = DataLoader(eval_dataset, collate_fn=data_collator, batch_size=args.per_device_eval_batch_size) 357 | 358 | # Optimizer 359 | # Split weights in two groups, one with weight decay and the other not. 360 | no_decay = ["bias", "LayerNorm.weight"] 361 | optimizer_grouped_parameters = [ 362 | { 363 | "params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)], 364 | "weight_decay": args.weight_decay, 365 | }, 366 | { 367 | "params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], 368 | "weight_decay": 0.0, 369 | }, 370 | ] 371 | optimizer = AdamW(optimizer_grouped_parameters, lr=args.learning_rate) 372 | 373 | # Prepare everything with our `accelerator`. 374 | model, optimizer, eval_dataloader = accelerator.prepare( 375 | model, optimizer, eval_dataloader 376 | ) 377 | if args.num_train_epochs > 0: 378 | train_dataloader = accelerator.prepare(train_dataloader) 379 | 380 | # Note: the training dataloader needs to be prepared before we grab its length below (because its length will 381 | # be shorter in a multi-process setup). 382 | 383 | # Scheduler and math around the number of training steps.
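# As a worked example of the step arithmetic below (with purely illustrative numbers): if the prepared
# train_dataloader yields 1,000 batches per process and gradient_accumulation_steps is 4, then
# num_update_steps_per_epoch = ceil(1000 / 4) = 250; with num_train_epochs = 20 and no explicit
# --max_train_steps, max_train_steps becomes 20 * 250 = 5,000 optimizer updates.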
384 | num_update_steps_per_epoch = math.ceil(len(train_dataloader) / args.gradient_accumulation_steps) if args.num_train_epochs > 0 else 1  # no train_dataloader exists in evaluation-only runs (num_train_epochs == 0) 385 | if args.max_train_steps is None: 386 | args.max_train_steps = args.num_train_epochs * num_update_steps_per_epoch 387 | else: 388 | args.num_train_epochs = math.ceil(args.max_train_steps / num_update_steps_per_epoch) 389 | 390 | lr_scheduler = get_scheduler( 391 | name=args.lr_scheduler_type, 392 | optimizer=optimizer, 393 | num_warmup_steps=args.num_warmup_steps, 394 | num_training_steps=args.max_train_steps, 395 | ) 396 | 397 | # Get the metric function 398 | if args.task_name is not None: 399 | metric = load_metric("glue", args.task_name) 400 | 401 | if args.num_train_epochs > 0: 402 | # Train! 403 | total_batch_size = args.per_device_train_batch_size * accelerator.num_processes * args.gradient_accumulation_steps 404 | 405 | logger.info("***** Running training *****") 406 | logger.info(f" Num examples = {len(train_dataset)}") 407 | logger.info(f" Num Epochs = {args.num_train_epochs}") 408 | logger.info(f" Instantaneous batch size per device = {args.per_device_train_batch_size}") 409 | logger.info(f" Total train batch size (w. parallel, distributed & accumulation) = {total_batch_size}") 410 | logger.info(f" Gradient Accumulation steps = {args.gradient_accumulation_steps}") 411 | logger.info(f" Total optimization steps = {args.max_train_steps}") 412 | # Only show the progress bar once on each machine. 413 | progress_bar = tqdm(range(args.max_train_steps), disable=not accelerator.is_local_main_process) 414 | completed_steps = 0 415 | 416 | for epoch in tqdm(range(args.num_train_epochs)): 417 | model.train() 418 | epoch_loss = 0.0 419 | for step, batch in enumerate(train_dataloader): 420 | outputs = model(**batch) 421 | loss = outputs.loss 422 | loss = loss / args.gradient_accumulation_steps 423 | accelerator.backward(loss) 424 | epoch_loss += loss.item() 425 | if step % args.gradient_accumulation_steps == 0 or step == len(train_dataloader) - 1: 426 | optimizer.step() 427 | lr_scheduler.step() 428 | optimizer.zero_grad() 429 | progress_bar.update(1) 430 | completed_steps += 1 431 | progress_bar.set_postfix(loss=epoch_loss / completed_steps) 432 | 433 | if completed_steps >= args.max_train_steps: 434 | break 435 | 436 | model.eval() 437 | all_preds = [] 438 | all_preds_raw = [] 439 | all_labels = [] 440 | for step, batch in enumerate(tqdm(eval_dataloader)): 441 | with torch.no_grad(): 442 | outputs = model(**batch) 443 | preds_raw = outputs.logits.sigmoid().cpu() 444 | preds = (preds_raw > 0.5).int() 445 | all_preds_raw.extend(list(preds_raw)) 446 | all_preds.extend(list(preds)) 447 | all_labels.extend(list(batch["labels"].cpu().numpy())) 448 | 449 | all_preds_raw = np.stack(all_preds_raw) 450 | all_preds = np.stack(all_preds) 451 | all_labels = np.stack(all_labels) 452 | metrics = all_metrics(yhat=all_preds, y=all_labels, yhat_raw=all_preds_raw) 453 | logger.info(f"epoch {epoch} finished") 454 | logger.info(f"metrics: {metrics}") 455 | 456 | if args.num_train_epochs == 0 and accelerator.is_local_main_process: 457 | model.eval() 458 | all_preds = [] 459 | all_preds_raw = [] 460 | all_labels = [] 461 | for step, batch in enumerate(tqdm(eval_dataloader)): 462 | with torch.no_grad(): 463 | outputs = model(**batch) 464 | preds_raw = outputs.logits.sigmoid().cpu() 465 | preds = (preds_raw > 0.5).int() 466 | all_preds_raw.extend(list(preds_raw)) 467 | all_preds.extend(list(preds)) 468 | all_labels.extend(list(batch["labels"].cpu().numpy())) 469 | 470 | all_preds_raw = np.stack(all_preds_raw)
471 | all_preds = np.stack(all_preds) 472 | all_labels = np.stack(all_labels) 473 | metrics = all_metrics(yhat=all_preds, y=all_labels, yhat_raw=all_preds_raw) 474 | logger.info(f"evaluation finished") 475 | logger.info(f"metrics: {metrics}") 476 | for t in [0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5]: 477 | all_preds = (all_preds_raw > t).astype(int) 478 | metrics = all_metrics(yhat=all_preds, y=all_labels, yhat_raw=all_preds_raw, k=[5,8,15]) 479 | logger.info(f"metrics for threshold {t}: {metrics}") 480 | 481 | if args.output_dir is not None and args.num_train_epochs > 0: 482 | accelerator.wait_for_everyone() 483 | unwrapped_model = accelerator.unwrap_model(model) 484 | unwrapped_model.save_pretrained(args.output_dir, save_function=accelerator.save) 485 | 486 | 487 | if __name__ == "__main__": 488 | main() 489 | --------------------------------------------------------------------------------
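Usage note: the following is a hypothetical invocation of src/run_icd.py that is consistent with the arguments defined above; the file paths, model identifier, and hyperparameter values are illustrative placeholders rather than settings taken from this repository.

    cd src
    python run_icd.py \
        --train_file ../data/mimic3/train_full.csv \
        --validation_file ../data/mimic3/test_full.csv \
        --code_file ../data/mimic3/ALL_CODES.txt \
        --model_name_or_path path/to/pretrained-roberta \
        --model_type roberta \
        --model_mode laat \
        --chunk_size 128 \
        --max_length 3072 \
        --per_device_train_batch_size 8 \
        --num_train_epochs 20 \
        --output_dir ../models/roberta-mimic3-full

For evaluation only, pass --num_train_epochs 0; in that case the script loads the finetuned model from --output_dir and reports metrics at several decision thresholds.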