├── Datasets ├── test.list ├── train.list └── validation.list ├── README.md ├── blocks ├── ASPP.py ├── EPSA.py ├── InvertedResidual.py ├── MulScale_Att.py ├── Soft_pooling.py ├── __pycache__ │ ├── ASPP.cpython-38.pyc │ ├── EPSA.cpython-38.pyc │ ├── InvertedResidual.cpython-38.pyc │ ├── MulScale_Att.cpython-38.pyc │ ├── Soft_pooling.cpython-38.pyc │ ├── reshape.cpython-38.pyc │ └── scale_attention.cpython-38.pyc ├── reshape.py └── scale_attention.py ├── dataset.py ├── losses_new.py ├── metrics.py ├── network.py ├── parser_arguments.py ├── utils ├── __pycache__ │ ├── dice_loss.cpython-38.pyc │ ├── dice_loss_github.cpython-38.pyc │ └── init.cpython-38.pyc ├── binary.py ├── dice_loss.py ├── dice_loss_github.py ├── evaluation.py ├── init.py └── transform.py └── val.py /Datasets/test.list: -------------------------------------------------------------------------------- 1 | ISIC_0012360.jpg 2 | ISIC_0013921.jpg 3 | ISIC_0014929.jpg 4 | ISIC_0010603.jpg 5 | ISIC_0014372.jpg 6 | ISIC_0013778.jpg 7 | ISIC_0012097.jpg 8 | ISIC_0012813.jpg 9 | ISIC_0014949.jpg 10 | ISIC_0009981.jpg 11 | ISIC_0014731.jpg 12 | ISIC_0015041.jpg 13 | ISIC_0003728.jpg 14 | ISIC_0000473.jpg 15 | ISIC_0010016.jpg 16 | ISIC_0015204.jpg 17 | ISIC_0013176.jpg 18 | ISIC_0000039.jpg 19 | ISIC_0014273.jpg 20 | ISIC_0006193.jpg 21 | ISIC_0011171.jpg 22 | ISIC_0010248.jpg 23 | ISIC_0013988.jpg 24 | ISIC_0014867.jpg 25 | ISIC_0014760.jpg 26 | ISIC_0002806.jpg 27 | ISIC_0013369.jpg 28 | ISIC_0014317.jpg 29 | ISIC_0000396.jpg 30 | ISIC_0014825.jpg 31 | ISIC_0002453.jpg 32 | ISIC_0010219.jpg 33 | ISIC_0016065.jpg 34 | ISIC_0013189.jpg 35 | ISIC_0012887.jpg 36 | ISIC_0011358.jpg 37 | ISIC_0014548.jpg 38 | ISIC_0000448.jpg 39 | ISIC_0000162.jpg 40 | ISIC_0012442.jpg 41 | ISIC_0000056.jpg 42 | ISIC_0015089.jpg 43 | ISIC_0013599.jpg 44 | ISIC_0013601.jpg 45 | ISIC_0000348.jpg 46 | ISIC_0012710.jpg 47 | ISIC_0005666.jpg 48 | ISIC_0013383.jpg 49 | ISIC_0010860.jpg 50 | ISIC_0008659.jpg 51 | ISIC_0000133.jpg 52 | 
ISIC_0014586.jpg 53 | ISIC_0000209.jpg 54 | ISIC_0013031.jpg 55 | ISIC_0000408.jpg 56 | ISIC_0000123.jpg 57 | ISIC_0010019.jpg 58 | ISIC_0013212.jpg 59 | ISIC_0015955.jpg 60 | ISIC_0000082.jpg 61 | ISIC_0015969.jpg 62 | ISIC_0015973.jpg 63 | ISIC_0000520.jpg 64 | ISIC_0011366.jpg 65 | ISIC_0002438.jpg 66 | ISIC_0000386.jpg 67 | ISIC_0010566.jpg 68 | ISIC_0000226.jpg 69 | ISIC_0014506.jpg 70 | ISIC_0014221.jpg 71 | ISIC_0010350.jpg 72 | ISIC_0012230.jpg 73 | ISIC_0007322.jpg 74 | ISIC_0000476.jpg 75 | ISIC_0001186.jpg 76 | ISIC_0013172.jpg 77 | ISIC_0013191.jpg 78 | ISIC_0000165.jpg 79 | ISIC_0012940.jpg 80 | ISIC_0014431.jpg 81 | ISIC_0014089.jpg 82 | ISIC_0015357.jpg 83 | ISIC_0013891.jpg 84 | ISIC_0013198.jpg 85 | ISIC_0012400.jpg 86 | ISIC_0011159.jpg 87 | ISIC_0013010.jpg 88 | ISIC_0010180.jpg 89 | ISIC_0015040.jpg 90 | ISIC_0015021.jpg 91 | ISIC_0014507.jpg 92 | ISIC_0000249.jpg 93 | ISIC_0000306.jpg 94 | ISIC_0013342.jpg 95 | ISIC_0013114.jpg 96 | ISIC_0013274.jpg 97 | ISIC_0013621.jpg 98 | ISIC_0014661.jpg 99 | ISIC_0000513.jpg 100 | ISIC_0009895.jpg 101 | ISIC_0014219.jpg 102 | ISIC_0014726.jpg 103 | ISIC_0013801.jpg 104 | ISIC_0013037.jpg 105 | ISIC_0014803.jpg 106 | ISIC_0012118.jpg 107 | ISIC_0000443.jpg 108 | ISIC_0000031.jpg 109 | ISIC_0013911.jpg 110 | ISIC_0000463.jpg 111 | ISIC_0012844.jpg 112 | ISIC_0013975.jpg 113 | ISIC_0010042.jpg 114 | ISIC_0010476.jpg 115 | ISIC_0000481.jpg 116 | ISIC_0014820.jpg 117 | ISIC_0012871.jpg 118 | ISIC_0013222.jpg 119 | ISIC_0015996.jpg 120 | ISIC_0014903.jpg 121 | ISIC_0000145.jpg 122 | ISIC_0000182.jpg 123 | ISIC_0015981.jpg 124 | ISIC_0011304.jpg 125 | ISIC_0013664.jpg 126 | ISIC_0014066.jpg 127 | ISIC_0013525.jpg 128 | ISIC_0010256.jpg 129 | ISIC_0000004.jpg 130 | ISIC_0010856.jpg 131 | ISIC_0000319.jpg 132 | ISIC_0000065.jpg 133 | ISIC_0012744.jpg 134 | ISIC_0015011.jpg 135 | ISIC_0000550.jpg 136 | ISIC_0014546.jpg 137 | ISIC_0000135.jpg 138 | ISIC_0015153.jpg 139 | ISIC_0013437.jpg 140 | ISIC_0000509.jpg 141 | 
ISIC_0002488.jpg 142 | ISIC_0015220.jpg 143 | ISIC_0000359.jpg 144 | ISIC_0000211.jpg 145 | ISIC_0013192.jpg 146 | ISIC_0009977.jpg 147 | ISIC_0014434.jpg 148 | ISIC_0010607.jpg 149 | ISIC_0000159.jpg 150 | ISIC_0000160.jpg 151 | ISIC_0014195.jpg 152 | ISIC_0002975.jpg 153 | ISIC_0009877.jpg 154 | ISIC_0009948.jpg 155 | ISIC_0001871.jpg 156 | ISIC_0013428.jpg 157 | ISIC_0011385.jpg 158 | ISIC_0014839.jpg 159 | ISIC_0010602.jpg 160 | ISIC_0009936.jpg 161 | ISIC_0011132.jpg 162 | ISIC_0013455.jpg 163 | ISIC_0013053.jpg 164 | ISIC_0010183.jpg 165 | ISIC_0015180.jpg 166 | ISIC_0014037.jpg 167 | ISIC_0014652.jpg 168 | ISIC_0015185.jpg 169 | ISIC_0000120.jpg 170 | ISIC_0011167.jpg 171 | ISIC_0000193.jpg 172 | ISIC_0012735.jpg 173 | ISIC_0016062.jpg 174 | ISIC_0008207.jpg 175 | ISIC_0014663.jpg 176 | ISIC_0009869.jpg 177 | ISIC_0013767.jpg 178 | ISIC_0014249.jpg 179 | ISIC_0000030.jpg 180 | ISIC_0013429.jpg 181 | ISIC_0013840.jpg 182 | ISIC_0000191.jpg 183 | ISIC_0014133.jpg 184 | ISIC_0014581.jpg 185 | ISIC_0010570.jpg 186 | ISIC_0014898.jpg 187 | ISIC_0000900.jpg 188 | ISIC_0010182.jpg 189 | ISIC_0014770.jpg 190 | ISIC_0015951.jpg 191 | ISIC_0010468.jpg 192 | ISIC_0009933.jpg 193 | ISIC_0015962.jpg 194 | ISIC_0013480.jpg 195 | ISIC_0011104.jpg 196 | ISIC_0012278.jpg 197 | ISIC_0000268.jpg 198 | ISIC_0015295.jpg 199 | ISIC_0000124.jpg 200 | ISIC_0011127.jpg 201 | ISIC_0012681.jpg 202 | ISIC_0014762.jpg 203 | ISIC_0012981.jpg 204 | ISIC_0009909.jpg 205 | ISIC_0014617.jpg 206 | ISIC_0000142.jpg 207 | ISIC_0010467.jpg 208 | ISIC_0009904.jpg 209 | ISIC_0013995.jpg 210 | ISIC_0010088.jpg 211 | ISIC_0016045.jpg 212 | ISIC_0009914.jpg 213 | ISIC_0009961.jpg 214 | ISIC_0010443.jpg 215 | ISIC_0011084.jpg 216 | ISIC_0000522.jpg 217 | ISIC_0010337.jpg 218 | ISIC_0013045.jpg 219 | ISIC_0010015.jpg 220 | ISIC_0000263.jpg 221 | ISIC_0013027.jpg 222 | ISIC_0014931.jpg 223 | ISIC_0012989.jpg 224 | ISIC_0012987.jpg 225 | ISIC_0000141.jpg 226 | ISIC_0014395.jpg 227 | ISIC_0013987.jpg 228 
| ISIC_0010474.jpg 229 | ISIC_0012548.jpg 230 | ISIC_0010202.jpg 231 | ISIC_0013147.jpg 232 | ISIC_0010267.jpg 233 | ISIC_0009992.jpg 234 | ISIC_0000176.jpg 235 | ISIC_0013606.jpg 236 | ISIC_0014478.jpg 237 | ISIC_0004715.jpg 238 | ISIC_0000059.jpg 239 | ISIC_0008347.jpg 240 | ISIC_0012381.jpg 241 | ISIC_0010213.jpg 242 | ISIC_0014897.jpg 243 | ISIC_0014802.jpg 244 | ISIC_0014812.jpg 245 | ISIC_0000009.jpg 246 | ISIC_0012658.jpg 247 | ISIC_0001247.jpg 248 | ISIC_0010240.jpg 249 | ISIC_0014665.jpg 250 | ISIC_0014410.jpg 251 | ISIC_0010206.jpg 252 | ISIC_0013235.jpg 253 | ISIC_0010565.jpg 254 | ISIC_0000069.jpg 255 | ISIC_0000207.jpg 256 | ISIC_0010588.jpg 257 | ISIC_0013792.jpg 258 | ISIC_0010441.jpg 259 | ISIC_0010044.jpg 260 | ISIC_0000164.jpg 261 | ISIC_0013696.jpg 262 | ISIC_0012803.jpg 263 | ISIC_0014331.jpg 264 | ISIC_0014702.jpg 265 | ISIC_0012903.jpg 266 | ISIC_0010488.jpg 267 | ISIC_0011353.jpg 268 | ISIC_0012095.jpg 269 | ISIC_0015146.jpg 270 | ISIC_0013112.jpg 271 | ISIC_0012318.jpg 272 | ISIC_0000390.jpg 273 | ISIC_0015631.jpg 274 | ISIC_0000245.jpg 275 | ISIC_0014419.jpg 276 | ISIC_0013438.jpg 277 | ISIC_0000490.jpg 278 | ISIC_0012672.jpg 279 | ISIC_0013966.jpg 280 | ISIC_0000213.jpg 281 | ISIC_0000368.jpg 282 | ISIC_0012382.jpg 283 | ISIC_0014453.jpg 284 | ISIC_0013414.jpg 285 | ISIC_0009344.jpg 286 | ISIC_0015175.jpg 287 | ISIC_0011207.jpg 288 | ISIC_0010591.jpg 289 | ISIC_0000146.jpg 290 | ISIC_0011326.jpg 291 | ISIC_0013012.jpg 292 | ISIC_0008552.jpg 293 | ISIC_0014992.jpg 294 | ISIC_0016064.jpg 295 | ISIC_0007760.jpg 296 | ISIC_0015964.jpg 297 | ISIC_0010348.jpg 298 | ISIC_0013639.jpg 299 | ISIC_0013489.jpg 300 | ISIC_0012521.jpg 301 | ISIC_0000097.jpg 302 | ISIC_0016037.jpg 303 | ISIC_0012086.jpg 304 | ISIC_0013961.jpg 305 | ISIC_0000287.jpg 306 | ISIC_0014503.jpg 307 | ISIC_0014502.jpg 308 | ISIC_0013443.jpg 309 | ISIC_0013226.jpg 310 | ISIC_0012259.jpg 311 | ISIC_0000530.jpg 312 | ISIC_0015251.jpg 313 | ISIC_0014163.jpg 314 | ISIC_0014987.jpg 
315 | ISIC_0016052.jpg 316 | ISIC_0000087.jpg 317 | ISIC_0010480.jpg 318 | ISIC_0000037.jpg 319 | ISIC_0014511.jpg 320 | ISIC_0013015.jpg 321 | ISIC_0009946.jpg 322 | ISIC_0013656.jpg 323 | ISIC_0010334.jpg 324 | ISIC_0012303.jpg 325 | ISIC_0008879.jpg 326 | ISIC_0013494.jpg 327 | ISIC_0014270.jpg 328 | ISIC_0013983.jpg 329 | ISIC_0011131.jpg 330 | ISIC_0009951.jpg 331 | ISIC_0000491.jpg 332 | ISIC_0014225.jpg 333 | ISIC_0010362.jpg 334 | ISIC_0000469.jpg 335 | ISIC_0015270.jpg 336 | ISIC_0000122.jpg 337 | ISIC_0000351.jpg 338 | ISIC_0013319.jpg 339 | ISIC_0009884.jpg 340 | ISIC_0012508.jpg 341 | ISIC_0013124.jpg 342 | ISIC_0015113.jpg 343 | ISIC_0015136.jpg 344 | ISIC_0002093.jpg 345 | ISIC_0000380.jpg 346 | ISIC_0013675.jpg 347 | ISIC_0014785.jpg 348 | ISIC_0013233.jpg 349 | ISIC_0013196.jpg 350 | ISIC_0009970.jpg 351 | ISIC_0015627.jpg 352 | ISIC_0010231.jpg 353 | ISIC_0000230.jpg 354 | ISIC_0000379.jpg 355 | ISIC_0014127.jpg 356 | ISIC_0016036.jpg 357 | ISIC_0014814.jpg 358 | ISIC_0000361.jpg 359 | ISIC_0013862.jpg 360 | ISIC_0015163.jpg 361 | ISIC_0015291.jpg 362 | ISIC_0013594.jpg 363 | ISIC_0014724.jpg 364 | ISIC_0014964.jpg 365 | ISIC_0000096.jpg 366 | ISIC_0011356.jpg 367 | ISIC_0015958.jpg 368 | ISIC_0015641.jpg 369 | ISIC_0011143.jpg 370 | ISIC_0015946.jpg 371 | ISIC_0000387.jpg 372 | ISIC_0000179.jpg 373 | ISIC_0010344.jpg 374 | ISIC_0012700.jpg 375 | ISIC_0014664.jpg 376 | ISIC_0013082.jpg 377 | ISIC_0016058.jpg 378 | ISIC_0013216.jpg 379 | ISIC_0013572.jpg 380 | ISIC_0000045.jpg 381 | ISIC_0009252.jpg 382 | ISIC_0010351.jpg 383 | ISIC_0013917.jpg 384 | ISIC_0010477.jpg 385 | ISIC_0000254.jpg 386 | ISIC_0011306.jpg 387 | ISIC_0015166.jpg 388 | ISIC_0013075.jpg 389 | ISIC_0009929.jpg 390 | ISIC_0012116.jpg 391 | ISIC_0012258.jpg 392 | ISIC_0013636.jpg 393 | ISIC_0013568.jpg 394 | ISIC_0014815.jpg 395 | ISIC_0011102.jpg 396 | ISIC_0011146.jpg 397 | ISIC_0014848.jpg 398 | ISIC_0014782.jpg 399 | ISIC_0014844.jpg 400 | ISIC_0000110.jpg 401 | 
ISIC_0009925.jpg 402 | ISIC_0014518.jpg 403 | ISIC_0013800.jpg 404 | ISIC_0015544.jpg 405 | ISIC_0015216.jpg 406 | ISIC_0013935.jpg 407 | ISIC_0014868.jpg 408 | ISIC_0011135.jpg 409 | ISIC_0013187.jpg 410 | ISIC_0011336.jpg 411 | ISIC_0013288.jpg 412 | ISIC_0001133.jpg 413 | ISIC_0014169.jpg 414 | ISIC_0010176.jpg 415 | ISIC_0013400.jpg 416 | ISIC_0011112.jpg 417 | ISIC_0000315.jpg 418 | ISIC_0015245.jpg 419 | ISIC_0012904.jpg 420 | ISIC_0001423.jpg 421 | ISIC_0013553.jpg 422 | ISIC_0011383.jpg 423 | ISIC_0009995.jpg 424 | ISIC_0014173.jpg 425 | ISIC_0010212.jpg 426 | ISIC_0014603.jpg 427 | ISIC_0013024.jpg 428 | ISIC_0010370.jpg 429 | ISIC_0014850.jpg 430 | ISIC_0013052.jpg 431 | ISIC_0010589.jpg 432 | ISIC_0013740.jpg 433 | ISIC_0000223.jpg 434 | ISIC_0015994.jpg 435 | ISIC_0010596.jpg 436 | ISIC_0012950.jpg 437 | ISIC_0010573.jpg 438 | ISIC_0014633.jpg 439 | ISIC_0000457.jpg 440 | ISIC_0002885.jpg 441 | ISIC_0010487.jpg 442 | ISIC_0015276.jpg 443 | ISIC_0009942.jpg 444 | ISIC_0015963.jpg 445 | ISIC_0014708.jpg 446 | ISIC_0010220.jpg 447 | ISIC_0014337.jpg 448 | ISIC_0010374.jpg 449 | ISIC_0009950.jpg 450 | ISIC_0011230.jpg 451 | ISIC_0000042.jpg 452 | ISIC_0000221.jpg 453 | ISIC_0000068.jpg 454 | ISIC_0012338.jpg 455 | ISIC_0013797.jpg 456 | ISIC_0013602.jpg 457 | ISIC_0000322.jpg 458 | ISIC_0012448.jpg 459 | ISIC_0013433.jpg 460 | ISIC_0012699.jpg 461 | ISIC_0010232.jpg 462 | ISIC_0012272.jpg 463 | ISIC_0001105.jpg 464 | ISIC_0000538.jpg 465 | ISIC_0013695.jpg 466 | ISIC_0011203.jpg 467 | ISIC_0013048.jpg 468 | ISIC_0009599.jpg 469 | ISIC_0014932.jpg 470 | ISIC_0015995.jpg 471 | ISIC_0013561.jpg 472 | ISIC_0014969.jpg 473 | ISIC_0014181.jpg 474 | ISIC_0012298.jpg 475 | ISIC_0010032.jpg 476 | ISIC_0009949.jpg 477 | ISIC_0012309.jpg 478 | ISIC_0011211.jpg 479 | ISIC_0015176.jpg 480 | ISIC_0014945.jpg 481 | ISIC_0014490.jpg 482 | ISIC_0012334.jpg 483 | ISIC_0014290.jpg 484 | ISIC_0009953.jpg 485 | ISIC_0013277.jpg 486 | ISIC_0010557.jpg 487 | ISIC_0003539.jpg 488 
| ISIC_0000139.jpg 489 | ISIC_0012493.jpg 490 | ISIC_0000442.jpg 491 | ISIC_0013140.jpg 492 | ISIC_0015416.jpg 493 | ISIC_0010061.jpg 494 | ISIC_0004337.jpg 495 | ISIC_0011292.jpg 496 | ISIC_0010458.jpg 497 | ISIC_0011176.jpg 498 | ISIC_0000121.jpg 499 | ISIC_0015208.jpg 500 | ISIC_0010452.jpg 501 | ISIC_0012806.jpg 502 | ISIC_0015160.jpg 503 | ISIC_0015403.jpg 504 | ISIC_0000413.jpg 505 | ISIC_0014943.jpg 506 | ISIC_0000275.jpg 507 | ISIC_0000297.jpg 508 | ISIC_0015401.jpg 509 | ISIC_0012092.jpg 510 | ISIC_0012464.jpg 511 | ISIC_0015046.jpg 512 | ISIC_0000151.jpg 513 | ISIC_0002459.jpg 514 | ISIC_0014255.jpg 515 | ISIC_0015968.jpg 516 | ISIC_0014818.jpg 517 | ISIC_0012256.jpg 518 | ISIC_0001449.jpg 519 | -------------------------------------------------------------------------------- /Datasets/train.list: -------------------------------------------------------------------------------- 1 | ISIC_0000499.jpg 2 | ISIC_0000012.jpg 3 | ISIC_0010318.jpg 4 | ISIC_0000210.jpg 5 | ISIC_0012332.jpg 6 | ISIC_0014723.jpg 7 | ISIC_0014501.jpg 8 | ISIC_0000202.jpg 9 | ISIC_0014910.jpg 10 | ISIC_0000384.jpg 11 | ISIC_0007087.jpg 12 | ISIC_0001292.jpg 13 | ISIC_0012721.jpg 14 | ISIC_0015064.jpg 15 | ISIC_0015986.jpg 16 | ISIC_0013136.jpg 17 | ISIC_0011322.jpg 18 | ISIC_0000217.jpg 19 | ISIC_0011297.jpg 20 | ISIC_0014834.jpg 21 | ISIC_0009928.jpg 22 | ISIC_0013958.jpg 23 | ISIC_0000456.jpg 24 | ISIC_0012282.jpg 25 | ISIC_0014132.jpg 26 | ISIC_0013371.jpg 27 | ISIC_0012814.jpg 28 | ISIC_0013618.jpg 29 | ISIC_0001140.jpg 30 | ISIC_0001163.jpg 31 | ISIC_0001254.jpg 32 | ISIC_0000203.jpg 33 | ISIC_0013106.jpg 34 | ISIC_0014851.jpg 35 | ISIC_0009973.jpg 36 | ISIC_0014558.jpg 37 | ISIC_0012529.jpg 38 | ISIC_0000224.jpg 39 | ISIC_0011214.jpg 40 | ISIC_0009974.jpg 41 | ISIC_0012109.jpg 42 | ISIC_0000418.jpg 43 | ISIC_0015190.jpg 44 | ISIC_0000089.jpg 45 | ISIC_0000882.jpg 46 | ISIC_0000496.jpg 47 | ISIC_0014921.jpg 48 | ISIC_0010360.jpg 49 | ISIC_0008528.jpg 50 | ISIC_0011095.jpg 51 | 
ISIC_0000420.jpg 52 | ISIC_0000092.jpg 53 | ISIC_0007788.jpg 54 | ISIC_0015223.jpg 55 | ISIC_0015965.jpg 56 | ISIC_0000486.jpg 57 | ISIC_0000326.jpg 58 | ISIC_0013644.jpg 59 | ISIC_0000466.jpg 60 | ISIC_0015215.jpg 61 | ISIC_0013403.jpg 62 | ISIC_0009967.jpg 63 | ISIC_0011098.jpg 64 | ISIC_0000034.jpg 65 | ISIC_0000155.jpg 66 | ISIC_0000344.jpg 67 | ISIC_0013830.jpg 68 | ISIC_0009988.jpg 69 | ISIC_0014543.jpg 70 | ISIC_0014274.jpg 71 | ISIC_0000040.jpg 72 | ISIC_0000072.jpg 73 | ISIC_0010319.jpg 74 | ISIC_0000330.jpg 75 | ISIC_0012089.jpg 76 | ISIC_0009889.jpg 77 | ISIC_0010496.jpg 78 | ISIC_0013491.jpg 79 | ISIC_0010567.jpg 80 | ISIC_0014440.jpg 81 | ISIC_0000091.jpg 82 | ISIC_0013025.jpg 83 | ISIC_0000519.jpg 84 | ISIC_0016070.jpg 85 | ISIC_0013306.jpg 86 | ISIC_0000425.jpg 87 | ISIC_0011114.jpg 88 | ISIC_0015161.jpg 89 | ISIC_0010184.jpg 90 | ISIC_0015018.jpg 91 | ISIC_0001306.jpg 92 | ISIC_0000439.jpg 93 | ISIC_0010481.jpg 94 | ISIC_0013595.jpg 95 | ISIC_0010327.jpg 96 | ISIC_0010023.jpg 97 | ISIC_0000214.jpg 98 | ISIC_0012248.jpg 99 | ISIC_0013311.jpg 100 | ISIC_0016050.jpg 101 | ISIC_0014698.jpg 102 | ISIC_0012911.jpg 103 | ISIC_0015466.jpg 104 | ISIC_0013518.jpg 105 | ISIC_0012324.jpg 106 | ISIC_0010574.jpg 107 | ISIC_0000301.jpg 108 | ISIC_0000187.jpg 109 | ISIC_0013378.jpg 110 | ISIC_0009920.jpg 111 | ISIC_0000170.jpg 112 | ISIC_0010457.jpg 113 | ISIC_0016043.jpg 114 | ISIC_0000067.jpg 115 | ISIC_0010497.jpg 116 | ISIC_0012526.jpg 117 | ISIC_0014489.jpg 118 | ISIC_0000501.jpg 119 | ISIC_0016063.jpg 120 | ISIC_0000276.jpg 121 | ISIC_0012478.jpg 122 | ISIC_0014716.jpg 123 | ISIC_0000518.jpg 124 | ISIC_0010455.jpg 125 | ISIC_0008116.jpg 126 | ISIC_0014696.jpg 127 | ISIC_0013970.jpg 128 | ISIC_0010034.jpg 129 | ISIC_0015417.jpg 130 | ISIC_0013492.jpg 131 | ISIC_0014955.jpg 132 | ISIC_0014171.jpg 133 | ISIC_0012683.jpg 134 | ISIC_0012204.jpg 135 | ISIC_0013217.jpg 136 | ISIC_0014299.jpg 137 | ISIC_0002976.jpg 138 | ISIC_0000137.jpg 139 | ISIC_0013159.jpg 140 | 
ISIC_0000108.jpg 141 | ISIC_0016027.jpg 142 | ISIC_0010017.jpg 143 | ISIC_0015476.jpg 144 | ISIC_0014382.jpg 145 | ISIC_0001286.jpg 146 | ISIC_0014046.jpg 147 | ISIC_0016025.jpg 148 | ISIC_0013007.jpg 149 | ISIC_0015150.jpg 150 | ISIC_0012164.jpg 151 | ISIC_0012257.jpg 152 | ISIC_0000358.jpg 153 | ISIC_0015008.jpg 154 | ISIC_0000341.jpg 155 | ISIC_0000480.jpg 156 | ISIC_0014027.jpg 157 | ISIC_0015353.jpg 158 | ISIC_0010587.jpg 159 | ISIC_0015144.jpg 160 | ISIC_0013000.jpg 161 | ISIC_0011090.jpg 162 | ISIC_0014651.jpg 163 | ISIC_0011301.jpg 164 | ISIC_0013227.jpg 165 | ISIC_0014831.jpg 166 | ISIC_0010472.jpg 167 | ISIC_0015016.jpg 168 | ISIC_0016024.jpg 169 | ISIC_0014720.jpg 170 | ISIC_0000294.jpg 171 | ISIC_0000363.jpg 172 | ISIC_0011212.jpg 173 | ISIC_0012099.jpg 174 | ISIC_0014361.jpg 175 | ISIC_0013831.jpg 176 | ISIC_0011157.jpg 177 | ISIC_0006021.jpg 178 | ISIC_0009960.jpg 179 | ISIC_0012406.jpg 180 | ISIC_0003308.jpg 181 | ISIC_0012739.jpg 182 | ISIC_0014809.jpg 183 | ISIC_0000204.jpg 184 | ISIC_0000014.jpg 185 | ISIC_0015603.jpg 186 | ISIC_0011367.jpg 187 | ISIC_0013832.jpg 188 | ISIC_0000423.jpg 189 | ISIC_0000128.jpg 190 | ISIC_0015978.jpg 191 | ISIC_0014049.jpg 192 | ISIC_0013581.jpg 193 | ISIC_0014248.jpg 194 | ISIC_0014632.jpg 195 | ISIC_0010186.jpg 196 | ISIC_0011158.jpg 197 | ISIC_0010571.jpg 198 | ISIC_0015125.jpg 199 | ISIC_0013258.jpg 200 | ISIC_0014805.jpg 201 | ISIC_0013516.jpg 202 | ISIC_0000063.jpg 203 | ISIC_0011088.jpg 204 | ISIC_0000026.jpg 205 | ISIC_0000057.jpg 206 | ISIC_0013304.jpg 207 | ISIC_0013577.jpg 208 | ISIC_0000556.jpg 209 | ISIC_0013798.jpg 210 | ISIC_0000346.jpg 211 | ISIC_0014677.jpg 212 | ISIC_0009915.jpg 213 | ISIC_0011330.jpg 214 | ISIC_0012810.jpg 215 | ISIC_0012136.jpg 216 | ISIC_0015568.jpg 217 | ISIC_0000036.jpg 218 | ISIC_0008600.jpg 219 | ISIC_0014694.jpg 220 | ISIC_0012323.jpg 221 | ISIC_0012378.jpg 222 | ISIC_0000388.jpg 223 | ISIC_0012167.jpg 224 | ISIC_0009939.jpg 225 | ISIC_0010341.jpg 226 | ISIC_0014470.jpg 227 
| ISIC_0013688.jpg 228 | ISIC_0012369.jpg 229 | ISIC_0015203.jpg 230 | ISIC_0015078.jpg 231 | ISIC_0000239.jpg 232 | ISIC_0012299.jpg 233 | ISIC_0011129.jpg 234 | ISIC_0011170.jpg 235 | ISIC_0014537.jpg 236 | ISIC_0015109.jpg 237 | ISIC_0015019.jpg 238 | ISIC_0010011.jpg 239 | ISIC_0011345.jpg 240 | ISIC_0014433.jpg 241 | ISIC_0014740.jpg 242 | ISIC_0014144.jpg 243 | ISIC_0013425.jpg 244 | ISIC_0012512.jpg 245 | ISIC_0013104.jpg 246 | ISIC_0014162.jpg 247 | ISIC_0014693.jpg 248 | ISIC_0010236.jpg 249 | ISIC_0012333.jpg 250 | ISIC_0010192.jpg 251 | ISIC_0007156.jpg 252 | ISIC_0012475.jpg 253 | ISIC_0015953.jpg 254 | ISIC_0000081.jpg 255 | ISIC_0010046.jpg 256 | ISIC_0012705.jpg 257 | ISIC_0015152.jpg 258 | ISIC_0012706.jpg 259 | ISIC_0012506.jpg 260 | ISIC_0014692.jpg 261 | ISIC_0014944.jpg 262 | ISIC_0014554.jpg 263 | ISIC_0013805.jpg 264 | ISIC_0013986.jpg 265 | ISIC_0000262.jpg 266 | ISIC_0000061.jpg 267 | ISIC_0010094.jpg 268 | ISIC_0014158.jpg 269 | ISIC_0015614.jpg 270 | ISIC_0012469.jpg 271 | ISIC_0010326.jpg 272 | ISIC_0011339.jpg 273 | ISIC_0014715.jpg 274 | ISIC_0010255.jpg 275 | ISIC_0010475.jpg 276 | ISIC_0000506.jpg 277 | ISIC_0000524.jpg 278 | ISIC_0010852.jpg 279 | ISIC_0014108.jpg 280 | ISIC_0013365.jpg 281 | ISIC_0013167.jpg 282 | ISIC_0014366.jpg 283 | ISIC_0012263.jpg 284 | ISIC_0015171.jpg 285 | ISIC_0000545.jpg 286 | ISIC_0013155.jpg 287 | ISIC_0012852.jpg 288 | ISIC_0000492.jpg 289 | ISIC_0012538.jpg 290 | ISIC_0013898.jpg 291 | ISIC_0012271.jpg 292 | ISIC_0000074.jpg 293 | ISIC_0014545.jpg 294 | ISIC_0000345.jpg 295 | ISIC_0015034.jpg 296 | ISIC_0011121.jpg 297 | ISIC_0014263.jpg 298 | ISIC_0009931.jpg 299 | ISIC_0012877.jpg 300 | ISIC_0011333.jpg 301 | ISIC_0013032.jpg 302 | ISIC_0011205.jpg 303 | ISIC_0014771.jpg 304 | ISIC_0013596.jpg 305 | ISIC_0000032.jpg 306 | ISIC_0010861.jpg 307 | ISIC_0010215.jpg 308 | ISIC_0015993.jpg 309 | ISIC_0014830.jpg 310 | ISIC_0013334.jpg 311 | ISIC_0011164.jpg 312 | ISIC_0009991.jpg 313 | ISIC_0007693.jpg 
314 | ISIC_0011224.jpg 315 | ISIC_0000412.jpg 316 | ISIC_0000316.jpg 317 | ISIC_0015383.jpg 318 | ISIC_0010037.jpg 319 | ISIC_0012221.jpg 320 | ISIC_0014288.jpg 321 | ISIC_0013748.jpg 322 | ISIC_0014513.jpg 323 | ISIC_0015395.jpg 324 | ISIC_0015481.jpg 325 | ISIC_0009860.jpg 326 | ISIC_0012148.jpg 327 | ISIC_0001131.jpg 328 | ISIC_0014829.jpg 329 | ISIC_0014360.jpg 330 | ISIC_0012518.jpg 331 | ISIC_0012320.jpg 332 | ISIC_0000116.jpg 333 | ISIC_0015966.jpg 334 | ISIC_0015243.jpg 335 | ISIC_0016071.jpg 336 | ISIC_0012656.jpg 337 | ISIC_0009564.jpg 338 | ISIC_0013001.jpg 339 | ISIC_0014222.jpg 340 | ISIC_0015947.jpg 341 | ISIC_0008280.jpg 342 | ISIC_0010380.jpg 343 | ISIC_0015971.jpg 344 | ISIC_0012108.jpg 345 | ISIC_0015082.jpg 346 | ISIC_0014869.jpg 347 | ISIC_0014150.jpg 348 | ISIC_0014600.jpg 349 | ISIC_0013874.jpg 350 | ISIC_0015167.jpg 351 | ISIC_0014211.jpg 352 | ISIC_0011363.jpg 353 | ISIC_0013896.jpg 354 | ISIC_0010490.jpg 355 | ISIC_0015212.jpg 356 | ISIC_0012669.jpg 357 | ISIC_0015411.jpg 358 | ISIC_0009035.jpg 359 | ISIC_0014697.jpg 360 | ISIC_0014475.jpg 361 | ISIC_0012897.jpg 362 | ISIC_0006914.jpg 363 | ISIC_0014238.jpg 364 | ISIC_0009533.jpg 365 | ISIC_0000483.jpg 366 | ISIC_0014798.jpg 367 | ISIC_0000329.jpg 368 | ISIC_0009083.jpg 369 | ISIC_0016028.jpg 370 | ISIC_0011382.jpg 371 | ISIC_0012856.jpg 372 | ISIC_0015060.jpg 373 | ISIC_0010013.jpg 374 | ISIC_0010442.jpg 375 | ISIC_0010239.jpg 376 | ISIC_0000474.jpg 377 | ISIC_0012363.jpg 378 | ISIC_0012888.jpg 379 | ISIC_0009921.jpg 380 | ISIC_0014658.jpg 381 | ISIC_0001367.jpg 382 | ISIC_0001213.jpg 383 | ISIC_0014786.jpg 384 | ISIC_0012496.jpg 385 | ISIC_0007557.jpg 386 | ISIC_0001204.jpg 387 | ISIC_0010493.jpg 388 | ISIC_0013796.jpg 389 | ISIC_0015298.jpg 390 | ISIC_0010009.jpg 391 | ISIC_0014853.jpg 392 | ISIC_0000264.jpg 393 | ISIC_0015975.jpg 394 | ISIC_0000104.jpg 395 | ISIC_0012434.jpg 396 | ISIC_0010340.jpg 397 | ISIC_0012155.jpg 398 | ISIC_0013364.jpg 399 | ISIC_0013184.jpg 400 | 
ISIC_0011092.jpg 401 | ISIC_0014729.jpg 402 | ISIC_0014983.jpg 403 | ISIC_0012453.jpg 404 | ISIC_0001212.jpg 405 | ISIC_0015390.jpg 406 | ISIC_0013828.jpg 407 | ISIC_0011085.jpg 408 | ISIC_0013360.jpg 409 | ISIC_0010177.jpg 410 | ISIC_0000391.jpg 411 | ISIC_0000303.jpg 412 | ISIC_0014166.jpg 413 | ISIC_0015510.jpg 414 | ISIC_0014728.jpg 415 | ISIC_0015537.jpg 416 | ISIC_0014946.jpg 417 | ISIC_0010081.jpg 418 | ISIC_0015526.jpg 419 | ISIC_0012901.jpg 420 | ISIC_0009958.jpg 421 | ISIC_0013673.jpg 422 | ISIC_0010059.jpg 423 | ISIC_0014775.jpg 424 | ISIC_0000542.jpg 425 | ISIC_0013794.jpg 426 | ISIC_0008294.jpg 427 | ISIC_0013580.jpg 428 | ISIC_0000453.jpg 429 | ISIC_0009945.jpg 430 | ISIC_0012481.jpg 431 | ISIC_0014769.jpg 432 | ISIC_0010445.jpg 433 | ISIC_0010357.jpg 434 | ISIC_0012384.jpg 435 | ISIC_0012684.jpg 436 | ISIC_0011089.jpg 437 | ISIC_0000011.jpg 438 | ISIC_0014183.jpg 439 | ISIC_0012663.jpg 440 | ISIC_0013809.jpg 441 | ISIC_0012213.jpg 442 | ISIC_0013685.jpg 443 | ISIC_0016030.jpg 444 | ISIC_0015313.jpg 445 | ISIC_0011139.jpg 446 | ISIC_0013747.jpg 447 | ISIC_0000366.jpg 448 | ISIC_0010041.jpg 449 | ISIC_0000147.jpg 450 | ISIC_0013808.jpg 451 | ISIC_0000265.jpg 452 | ISIC_0015987.jpg 453 | ISIC_0013073.jpg 454 | ISIC_0015015.jpg 455 | ISIC_0010361.jpg 456 | ISIC_0013070.jpg 457 | ISIC_0000117.jpg 458 | ISIC_0012413.jpg 459 | ISIC_0000282.jpg 460 | ISIC_0012523.jpg 461 | ISIC_0013765.jpg 462 | ISIC_0000554.jpg 463 | ISIC_0010000.jpg 464 | ISIC_0003005.jpg 465 | ISIC_0016008.jpg 466 | ISIC_0015982.jpg 467 | ISIC_0011202.jpg 468 | ISIC_0001374.jpg 469 | ISIC_0000208.jpg 470 | ISIC_0012678.jpg 471 | ISIC_0016053.jpg 472 | ISIC_0010562.jpg 473 | ISIC_0015945.jpg 474 | ISIC_0000313.jpg 475 | ISIC_0000415.jpg 476 | ISIC_0000510.jpg 477 | ISIC_0009873.jpg 478 | ISIC_0012777.jpg 479 | ISIC_0014942.jpg 480 | ISIC_0012889.jpg 481 | ISIC_0015184.jpg 482 | ISIC_0014730.jpg 483 | ISIC_0000013.jpg 484 | ISIC_0012932.jpg 485 | ISIC_0015026.jpg 486 | ISIC_0012957.jpg 487 
| ISIC_0014657.jpg 488 | ISIC_0012701.jpg 489 | ISIC_0014794.jpg 490 | ISIC_0014857.jpg 491 | ISIC_0014081.jpg 492 | ISIC_0012187.jpg 493 | ISIC_0013793.jpg 494 | ISIC_0012786.jpg 495 | ISIC_0010003.jpg 496 | ISIC_0015030.jpg 497 | ISIC_0002616.jpg 498 | ISIC_0012826.jpg 499 | ISIC_0014476.jpg 500 | ISIC_0013588.jpg 501 | ISIC_0013635.jpg 502 | ISIC_0015368.jpg 503 | ISIC_0014422.jpg 504 | ISIC_0009940.jpg 505 | ISIC_0010069.jpg 506 | ISIC_0009971.jpg 507 | ISIC_0000324.jpg 508 | ISIC_0014807.jpg 509 | ISIC_0008145.jpg 510 | ISIC_0013355.jpg 511 | ISIC_0013248.jpg 512 | ISIC_0012670.jpg 513 | ISIC_0014968.jpg 514 | ISIC_0010346.jpg 515 | ISIC_0015440.jpg 516 | ISIC_0010372.jpg 517 | ISIC_0002469.jpg 518 | ISIC_0014890.jpg 519 | ISIC_0012792.jpg 520 | ISIC_0000198.jpg 521 | ISIC_0015002.jpg 522 | ISIC_0013398.jpg 523 | ISIC_0010173.jpg 524 | ISIC_0014989.jpg 525 | ISIC_0000064.jpg 526 | ISIC_0013493.jpg 527 | ISIC_0013129.jpg 528 | ISIC_0003346.jpg 529 | ISIC_0012151.jpg 530 | ISIC_0000547.jpg 531 | ISIC_0010349.jpg 532 | ISIC_0014077.jpg 533 | ISIC_0001152.jpg 534 | ISIC_0011296.jpg 535 | ISIC_0014768.jpg 536 | ISIC_0015250.jpg 537 | ISIC_0006350.jpg 538 | ISIC_0012207.jpg 539 | ISIC_0000494.jpg 540 | ISIC_0010100.jpg 541 | ISIC_0008993.jpg 542 | ISIC_0012210.jpg 543 | ISIC_0013326.jpg 544 | ISIC_0000095.jpg 545 | ISIC_0012211.jpg 546 | ISIC_0000381.jpg 547 | ISIC_0000337.jpg 548 | ISIC_0011328.jpg 549 | ISIC_0012313.jpg 550 | ISIC_0013356.jpg 551 | ISIC_0012126.jpg 552 | ISIC_0010569.jpg 553 | ISIC_0013333.jpg 554 | ISIC_0000206.jpg 555 | ISIC_0014393.jpg 556 | ISIC_0009881.jpg 557 | ISIC_0016007.jpg 558 | ISIC_0014577.jpg 559 | ISIC_0009965.jpg 560 | ISIC_0012988.jpg 561 | ISIC_0000219.jpg 562 | ISIC_0014941.jpg 563 | ISIC_0000465.jpg 564 | ISIC_0008541.jpg 565 | ISIC_0014804.jpg 566 | ISIC_0013948.jpg 567 | ISIC_0014099.jpg 568 | ISIC_0002647.jpg 569 | ISIC_0000517.jpg 570 | ISIC_0006815.jpg 571 | ISIC_0012306.jpg 572 | ISIC_0014430.jpg 573 | ISIC_0013271.jpg 
574 | ISIC_0013802.jpg 575 | ISIC_0000354.jpg 576 | ISIC_0000175.jpg 577 | ISIC_0011082.jpg 578 | ISIC_0013984.jpg 579 | ISIC_0015482.jpg 580 | ISIC_0015219.jpg 581 | ISIC_0012719.jpg 582 | ISIC_0014187.jpg 583 | ISIC_0013395.jpg 584 | ISIC_0013034.jpg 585 | ISIC_0014197.jpg 586 | ISIC_0000125.jpg 587 | ISIC_0006795.jpg 588 | ISIC_0013733.jpg 589 | ISIC_0010238.jpg 590 | ISIC_0011386.jpg 591 | ISIC_0012865.jpg 592 | ISIC_0012878.jpg 593 | ISIC_0000383.jpg 594 | ISIC_0011349.jpg 595 | ISIC_0015200.jpg 596 | ISIC_0011149.jpg 597 | ISIC_0015961.jpg 598 | ISIC_0010205.jpg 599 | ISIC_0000044.jpg 600 | ISIC_0010448.jpg 601 | ISIC_0001102.jpg 602 | ISIC_0000336.jpg 603 | ISIC_0013055.jpg 604 | ISIC_0013197.jpg 605 | ISIC_0012726.jpg 606 | ISIC_0012212.jpg 607 | ISIC_0000119.jpg 608 | ISIC_0000321.jpg 609 | ISIC_0015443.jpg 610 | ISIC_0015293.jpg 611 | ISIC_0011120.jpg 612 | ISIC_0013736.jpg 613 | ISIC_0014580.jpg 614 | ISIC_0010038.jpg 615 | ISIC_0010449.jpg 616 | ISIC_0015363.jpg 617 | ISIC_0015284.jpg 618 | ISIC_0009982.jpg 619 | ISIC_0013833.jpg 620 | ISIC_0014788.jpg 621 | ISIC_0014574.jpg 622 | ISIC_0009879.jpg 623 | ISIC_0000177.jpg 624 | ISIC_0012828.jpg 625 | ISIC_0013079.jpg 626 | ISIC_0012233.jpg 627 | ISIC_0014687.jpg 628 | ISIC_0011199.jpg 629 | ISIC_0012891.jpg 630 | ISIC_0009930.jpg 631 | ISIC_0012756.jpg 632 | ISIC_0015636.jpg 633 | ISIC_0013087.jpg 634 | ISIC_0000533.jpg 635 | ISIC_0010379.jpg 636 | ISIC_0015468.jpg 637 | ISIC_0009758.jpg 638 | ISIC_0000049.jpg 639 | ISIC_0013997.jpg 640 | ISIC_0011298.jpg 641 | ISIC_0013459.jpg 642 | ISIC_0012949.jpg 643 | ISIC_0010234.jpg 644 | ISIC_0014915.jpg 645 | ISIC_0011323.jpg 646 | ISIC_0009962.jpg 647 | ISIC_0000181.jpg 648 | ISIC_0013772.jpg 649 | ISIC_0001106.jpg 650 | ISIC_0000183.jpg 651 | ISIC_0012127.jpg 652 | ISIC_0010070.jpg 653 | ISIC_0010169.jpg 654 | ISIC_0000029.jpg 655 | ISIC_0001385.jpg 656 | ISIC_0000242.jpg 657 | ISIC_0010595.jpg 658 | ISIC_0012551.jpg 659 | ISIC_0015139.jpg 660 | 
ISIC_0013713.jpg 661 | ISIC_0005247.jpg 662 | ISIC_0000071.jpg 663 | ISIC_0010089.jpg 664 | ISIC_0013238.jpg 665 | ISIC_0000349.jpg 666 | ISIC_0000511.jpg 667 | ISIC_0010558.jpg 668 | ISIC_0009917.jpg 669 | ISIC_0004110.jpg 670 | ISIC_0014883.jpg 671 | ISIC_0000150.jpg 672 | ISIC_0012661.jpg 673 | ISIC_0014062.jpg 674 | ISIC_0000111.jpg 675 | ISIC_0015224.jpg 676 | ISIC_0014076.jpg 677 | ISIC_0000528.jpg 678 | ISIC_0010047.jpg 679 | ISIC_0014547.jpg 680 | ISIC_0013804.jpg 681 | ISIC_0011378.jpg 682 | ISIC_0014956.jpg 683 | ISIC_0014907.jpg 684 | ISIC_0011213.jpg 685 | ISIC_0001769.jpg 686 | ISIC_0013160.jpg 687 | ISIC_0014973.jpg 688 | ISIC_0014469.jpg 689 | ISIC_0010235.jpg 690 | ISIC_0012722.jpg 691 | ISIC_0014876.jpg 692 | ISIC_0013511.jpg 693 | ISIC_0000431.jpg 694 | ISIC_0013432.jpg 695 | ISIC_0014982.jpg 696 | ISIC_0014713.jpg 697 | ISIC_0012660.jpg 698 | ISIC_0001142.jpg 699 | ISIC_0013169.jpg 700 | ISIC_0009966.jpg 701 | ISIC_0012379.jpg 702 | ISIC_0007796.jpg 703 | ISIC_0000006.jpg 704 | ISIC_0015110.jpg 705 | ISIC_0012677.jpg 706 | ISIC_0014913.jpg 707 | ISIC_0006114.jpg 708 | ISIC_0014576.jpg 709 | ISIC_0014928.jpg 710 | ISIC_0016066.jpg 711 | ISIC_0000256.jpg 712 | ISIC_0000460.jpg 713 | ISIC_0013552.jpg 714 | ISIC_0000215.jpg 715 | ISIC_0011101.jpg 716 | ISIC_0014832.jpg 717 | ISIC_0000458.jpg 718 | ISIC_0013783.jpg 719 | ISIC_0015031.jpg 720 | ISIC_0010187.jpg 721 | ISIC_0011402.jpg 722 | ISIC_0010226.jpg 723 | ISIC_0012999.jpg 724 | ISIC_0010216.jpg 725 | ISIC_0010024.jpg 726 | ISIC_0012357.jpg 727 | ISIC_0015043.jpg 728 | ISIC_0010342.jpg 729 | ISIC_0014136.jpg 730 | ISIC_0015941.jpg 731 | ISIC_0014743.jpg 732 | ISIC_0010323.jpg 733 | ISIC_0001100.jpg 734 | ISIC_0013417.jpg 735 | ISIC_0000103.jpg 736 | ISIC_0000148.jpg 737 | ISIC_0010483.jpg 738 | ISIC_0015133.jpg 739 | ISIC_0000233.jpg 740 | ISIC_0008785.jpg 741 | ISIC_0002206.jpg 742 | ISIC_0000078.jpg 743 | ISIC_0014129.jpg 744 | ISIC_0014189.jpg 745 | ISIC_0012680.jpg 746 | ISIC_0009078.jpg 747 
| ISIC_0010858.jpg 748 | ISIC_0010335.jpg 749 | ISIC_0014284.jpg 750 | ISIC_0014952.jpg 751 | ISIC_0009430.jpg 752 | ISIC_0013310.jpg 753 | ISIC_0013165.jpg 754 | ISIC_0014423.jpg 755 | ISIC_0014441.jpg 756 | ISIC_0000307.jpg 757 | ISIC_0013996.jpg 758 | ISIC_0014637.jpg 759 | ISIC_0000352.jpg 760 | ISIC_0012697.jpg 761 | ISIC_0015355.jpg 762 | ISIC_0014347.jpg 763 | ISIC_0000003.jpg 764 | ISIC_0000099.jpg 765 | ISIC_0012137.jpg 766 | ISIC_0009874.jpg 767 | ISIC_0010074.jpg 768 | ISIC_0000113.jpg 769 | ISIC_0012746.jpg 770 | ISIC_0013457.jpg 771 | ISIC_0009980.jpg 772 | ISIC_0009976.jpg 773 | ISIC_0010227.jpg 774 | ISIC_0015226.jpg 775 | ISIC_0010461.jpg 776 | ISIC_0014635.jpg 777 | ISIC_0012905.jpg 778 | ISIC_0014908.jpg 779 | ISIC_0012311.jpg 780 | ISIC_0012708.jpg 781 | ISIC_0014628.jpg 782 | ISIC_0010854.jpg 783 | ISIC_0012884.jpg 784 | ISIC_0013667.jpg 785 | ISIC_0012986.jpg 786 | ISIC_0013981.jpg 787 | ISIC_0015207.jpg 788 | ISIC_0000094.jpg 789 | ISIC_0014739.jpg 790 | ISIC_0012102.jpg 791 | ISIC_0012945.jpg 792 | ISIC_0015071.jpg 793 | ISIC_0014763.jpg 794 | ISIC_0012415.jpg 795 | ISIC_0012173.jpg 796 | ISIC_0010456.jpg 797 | ISIC_0000218.jpg 798 | ISIC_0013065.jpg 799 | ISIC_0011334.jpg 800 | ISIC_0011107.jpg 801 | ISIC_0015020.jpg 802 | ISIC_0012773.jpg 803 | ISIC_0013721.jpg 804 | ISIC_0000066.jpg 805 | ISIC_0000372.jpg 806 | ISIC_0015254.jpg 807 | ISIC_0013565.jpg 808 | ISIC_0012855.jpg 809 | ISIC_0015948.jpg 810 | ISIC_0000250.jpg 811 | ISIC_0013220.jpg 812 | ISIC_0000015.jpg 813 | ISIC_0012356.jpg 814 | ISIC_0010086.jpg 815 | ISIC_0013287.jpg 816 | ISIC_0012290.jpg 817 | ISIC_0006651.jpg 818 | ISIC_0010851.jpg 819 | ISIC_0012358.jpg 820 | ISIC_0000376.jpg 821 | ISIC_0013651.jpg 822 | ISIC_0013879.jpg 823 | ISIC_0014843.jpg 824 | ISIC_0010237.jpg 825 | ISIC_0010492.jpg 826 | ISIC_0008256.jpg 827 | ISIC_0000016.jpg 828 | ISIC_0013969.jpg 829 | ISIC_0013465.jpg 830 | ISIC_0014994.jpg 831 | ISIC_0011348.jpg 832 | ISIC_0015037.jpg 833 | ISIC_0014114.jpg 
834 | ISIC_0013498.jpg 835 | ISIC_0012956.jpg 836 | ISIC_0010332.jpg 837 | ISIC_0010597.jpg 838 | ISIC_0010599.jpg 839 | ISIC_0000436.jpg 840 | ISIC_0013026.jpg 841 | ISIC_0013523.jpg 842 | ISIC_0000407.jpg 843 | ISIC_0014059.jpg 844 | ISIC_0014742.jpg 845 | ISIC_0016069.jpg 846 | ISIC_0016038.jpg 847 | ISIC_0000531.jpg 848 | ISIC_0012266.jpg 849 | ISIC_0000419.jpg 850 | ISIC_0014131.jpg 851 | ISIC_0014835.jpg 852 | ISIC_0012517.jpg 853 | ISIC_0014233.jpg 854 | ISIC_0000327.jpg 855 | ISIC_0013374.jpg 856 | ISIC_0013708.jpg 857 | ISIC_0007332.jpg 858 | ISIC_0000046.jpg 859 | ISIC_0000299.jpg 860 | ISIC_0011387.jpg 861 | ISIC_0015232.jpg 862 | ISIC_0011338.jpg 863 | ISIC_0013558.jpg 864 | ISIC_0000409.jpg 865 | ISIC_0012742.jpg 866 | ISIC_0000365.jpg 867 | ISIC_0011393.jpg 868 | ISIC_0013980.jpg 869 | ISIC_0012944.jpg 870 | ISIC_0014919.jpg 871 | ISIC_0016046.jpg 872 | ISIC_0000109.jpg 873 | ISIC_0002476.jpg 874 | ISIC_0013819.jpg 875 | ISIC_0000281.jpg 876 | ISIC_0014139.jpg 877 | ISIC_0012395.jpg 878 | ISIC_0015173.jpg 879 | ISIC_0015311.jpg 880 | ISIC_0014625.jpg 881 | ISIC_0012511.jpg 882 | ISIC_0014823.jpg 883 | ISIC_0000549.jpg 884 | ISIC_0013141.jpg 885 | ISIC_0014160.jpg 886 | ISIC_0012147.jpg 887 | ISIC_0014640.jpg 888 | ISIC_0013166.jpg 889 | ISIC_0000105.jpg 890 | ISIC_0000529.jpg 891 | ISIC_0013702.jpg 892 | ISIC_0013023.jpg 893 | ISIC_0014922.jpg 894 | ISIC_0013178.jpg 895 | ISIC_0012941.jpg 896 | ISIC_0009954.jpg 897 | ISIC_0001126.jpg 898 | ISIC_0013749.jpg 899 | ISIC_0000493.jpg 900 | ISIC_0000498.jpg 901 | ISIC_0014784.jpg 902 | ISIC_0000310.jpg 903 | ISIC_0015483.jpg 904 | ISIC_0013193.jpg 905 | ISIC_0004168.jpg 906 | ISIC_0000288.jpg 907 | ISIC_0014486.jpg 908 | ISIC_0013738.jpg 909 | ISIC_0013806.jpg 910 | ISIC_0000260.jpg 911 | ISIC_0010606.jpg 912 | ISIC_0011156.jpg 913 | ISIC_0015974.jpg 914 | ISIC_0015464.jpg 915 | ISIC_0014186.jpg 916 | ISIC_0012725.jpg 917 | ISIC_0000447.jpg 918 | ISIC_0012329.jpg 919 | ISIC_0010576.jpg 920 | 
ISIC_0012134.jpg 921 | ISIC_0008406.jpg 922 | ISIC_0016042.jpg 923 | ISIC_0000403.jpg 924 | ISIC_0012690.jpg 925 | ISIC_0000112.jpg 926 | ISIC_0013585.jpg 927 | ISIC_0014601.jpg 928 | ISIC_0015967.jpg 929 | ISIC_0015997.jpg 930 | ISIC_0012966.jpg 931 | ISIC_0000433.jpg 932 | ISIC_0015044.jpg 933 | ISIC_0006612.jpg 934 | ISIC_0002871.jpg 935 | ISIC_0008807.jpg 936 | ISIC_0013118.jpg 937 | ISIC_0011341.jpg 938 | ISIC_0015023.jpg 939 | ISIC_0000216.jpg 940 | ISIC_0010201.jpg 941 | ISIC_0015132.jpg 942 | ISIC_0000134.jpg 943 | ISIC_0014585.jpg 944 | ISIC_0008992.jpg 945 | ISIC_0000243.jpg 946 | ISIC_0010105.jpg 947 | ISIC_0013410.jpg 948 | ISIC_0015949.jpg 949 | ISIC_0010853.jpg 950 | ISIC_0014849.jpg 951 | ISIC_0015418.jpg 952 | ISIC_0012990.jpg 953 | ISIC_0012693.jpg 954 | ISIC_0014821.jpg 955 | ISIC_0015559.jpg 956 | ISIC_0015189.jpg 957 | ISIC_0010333.jpg 958 | ISIC_0012665.jpg 959 | ISIC_0010356.jpg 960 | ISIC_0000521.jpg 961 | ISIC_0013063.jpg 962 | ISIC_0011219.jpg 963 | ISIC_0014923.jpg 964 | ISIC_0011228.jpg 965 | ISIC_0013691.jpg 966 | ISIC_0000274.jpg 967 | ISIC_0013689.jpg 968 | ISIC_0010584.jpg 969 | ISIC_0000323.jpg 970 | ISIC_0000360.jpg 971 | ISIC_0000525.jpg 972 | ISIC_0009902.jpg 973 | ISIC_0013526.jpg 974 | ISIC_0013325.jpg 975 | ISIC_0011347.jpg 976 | ISIC_0010358.jpg 977 | ISIC_0014542.jpg 978 | ISIC_0011315.jpg 979 | ISIC_0014765.jpg 980 | ISIC_0000192.jpg 981 | ISIC_0014833.jpg 982 | ISIC_0015279.jpg 983 | ISIC_0015201.jpg 984 | ISIC_0000189.jpg 985 | ISIC_0015130.jpg 986 | ISIC_0013047.jpg 987 | ISIC_0010055.jpg 988 | ISIC_0013953.jpg 989 | ISIC_0000505.jpg 990 | ISIC_0000364.jpg 991 | ISIC_0000471.jpg 992 | ISIC_0009918.jpg 993 | ISIC_0014072.jpg 994 | ISIC_0014428.jpg 995 | ISIC_0009906.jpg 996 | ISIC_0012676.jpg 997 | ISIC_0013775.jpg 998 | ISIC_0014800.jpg 999 | ISIC_0012837.jpg 1000 | ISIC_0014541.jpg 1001 | ISIC_0000527.jpg 1002 | ISIC_0012510.jpg 1003 | ISIC_0013120.jpg 1004 | ISIC_0014937.jpg 1005 | ISIC_0012879.jpg 1006 | 
ISIC_0013803.jpg 1007 | ISIC_0015009.jpg 1008 | ISIC_0012880.jpg 1009 | ISIC_0016061.jpg 1010 | ISIC_0002107.jpg 1011 | ISIC_0012238.jpg 1012 | ISIC_0014308.jpg 1013 | ISIC_0013837.jpg 1014 | ISIC_0010025.jpg 1015 | ISIC_0012435.jpg 1016 | ISIC_0000500.jpg 1017 | ISIC_0012876.jpg 1018 | ISIC_0015369.jpg 1019 | ISIC_0014190.jpg 1020 | ISIC_0013680.jpg 1021 | ISIC_0014904.jpg 1022 | ISIC_0010444.jpg 1023 | ISIC_0002829.jpg 1024 | ISIC_0009165.jpg 1025 | ISIC_0015032.jpg 1026 | ISIC_0013411.jpg 1027 | ISIC_0000508.jpg 1028 | ISIC_0000127.jpg 1029 | ISIC_0016011.jpg 1030 | ISIC_0000166.jpg 1031 | ISIC_0014156.jpg 1032 | ISIC_0014029.jpg 1033 | ISIC_0011163.jpg 1034 | ISIC_0000449.jpg 1035 | ISIC_0010171.jpg 1036 | ISIC_0005620.jpg 1037 | ISIC_0013223.jpg 1038 | ISIC_0012314.jpg 1039 | ISIC_0010233.jpg 1040 | ISIC_0013134.jpg 1041 | ISIC_0003559.jpg 1042 | ISIC_0010228.jpg 1043 | ISIC_0012228.jpg 1044 | ISIC_0010857.jpg 1045 | ISIC_0010382.jpg 1046 | ISIC_0000019.jpg 1047 | ISIC_0013181.jpg 1048 | ISIC_0012090.jpg 1049 | ISIC_0000378.jpg 1050 | ISIC_0012907.jpg 1051 | ISIC_0010064.jpg 1052 | ISIC_0004115.jpg 1053 | ISIC_0014845.jpg 1054 | ISIC_0011210.jpg 1055 | ISIC_0000283.jpg 1056 | ISIC_0001296.jpg 1057 | ISIC_0013807.jpg 1058 | ISIC_0015142.jpg 1059 | ISIC_0014525.jpg 1060 | ISIC_0013910.jpg 1061 | ISIC_0014722.jpg 1062 | ISIC_0000126.jpg 1063 | ISIC_0011384.jpg 1064 | ISIC_0014791.jpg 1065 | ISIC_0013676.jpg 1066 | ISIC_0009943.jpg 1067 | ISIC_0012222.jpg 1068 | ISIC_0000535.jpg 1069 | ISIC_0010104.jpg 1070 | ISIC_0010864.jpg 1071 | ISIC_0009160.jpg 1072 | ISIC_0014473.jpg 1073 | ISIC_0000451.jpg 1074 | ISIC_0000295.jpg 1075 | ISIC_0016035.jpg 1076 | ISIC_0011223.jpg 1077 | ISIC_0011373.jpg 1078 | ISIC_0011305.jpg 1079 | ISIC_0009188.jpg 1080 | ISIC_0000008.jpg 1081 | ISIC_0000382.jpg 1082 | ISIC_0013244.jpg 1083 | ISIC_0014780.jpg 1084 | ISIC_0015998.jpg 1085 | ISIC_0014457.jpg 1086 | ISIC_0010054.jpg 1087 | ISIC_0000332.jpg 1088 | ISIC_0011295.jpg 1089 | 
ISIC_0013200.jpg 1090 | ISIC_0000140.jpg 1091 | ISIC_0003582.jpg 1092 | ISIC_0000298.jpg 1093 | ISIC_0013321.jpg 1094 | ISIC_0000374.jpg 1095 | ISIC_0011109.jpg 1096 | ISIC_0012883.jpg 1097 | ISIC_0001148.jpg 1098 | ISIC_0014855.jpg 1099 | ISIC_0013867.jpg 1100 | ISIC_0000541.jpg 1101 | ISIC_0013567.jpg 1102 | ISIC_0010494.jpg 1103 | ISIC_0000244.jpg 1104 | ISIC_0009297.jpg 1105 | ISIC_0000339.jpg 1106 | ISIC_0000416.jpg 1107 | ISIC_0013430.jpg 1108 | ISIC_0012227.jpg 1109 | ISIC_0000410.jpg 1110 | ISIC_0015496.jpg 1111 | ISIC_0012455.jpg 1112 | ISIC_0012179.jpg 1113 | ISIC_0009504.jpg 1114 | ISIC_0000515.jpg 1115 | ISIC_0016056.jpg 1116 | ISIC_0000107.jpg 1117 | ISIC_0010073.jpg 1118 | ISIC_0015309.jpg 1119 | ISIC_0010466.jpg 1120 | ISIC_0014933.jpg 1121 | ISIC_0012674.jpg 1122 | ISIC_0015983.jpg 1123 | ISIC_0000152.jpg 1124 | ISIC_0014527.jpg 1125 | ISIC_0010339.jpg 1126 | ISIC_0010850.jpg 1127 | ISIC_0013390.jpg 1128 | ISIC_0010264.jpg 1129 | ISIC_0013942.jpg 1130 | ISIC_0015244.jpg 1131 | ISIC_0010244.jpg 1132 | ISIC_0008913.jpg 1133 | ISIC_0009875.jpg 1134 | ISIC_0014535.jpg 1135 | ISIC_0000093.jpg 1136 | ISIC_0012711.jpg 1137 | ISIC_0010846.jpg 1138 | ISIC_0014998.jpg 1139 | ISIC_0010090.jpg 1140 | ISIC_0014860.jpg 1141 | ISIC_0014872.jpg 1142 | ISIC_0013314.jpg 1143 | ISIC_0011166.jpg 1144 | ISIC_0014966.jpg 1145 | ISIC_0015108.jpg 1146 | ISIC_0000434.jpg 1147 | ISIC_0013393.jpg 1148 | ISIC_0001242.jpg 1149 | ISIC_0000225.jpg 1150 | ISIC_0015255.jpg 1151 | ISIC_0013294.jpg 1152 | ISIC_0010168.jpg 1153 | ISIC_0016048.jpg 1154 | ISIC_0014346.jpg 1155 | ISIC_0010265.jpg 1156 | ISIC_0006982.jpg 1157 | ISIC_0002374.jpg 1158 | ISIC_0013495.jpg 1159 | ISIC_0004985.jpg 1160 | ISIC_0010863.jpg 1161 | ISIC_0015056.jpg 1162 | ISIC_0015360.jpg 1163 | ISIC_0013670.jpg 1164 | ISIC_0012330.jpg 1165 | ISIC_0013815.jpg 1166 | ISIC_0013423.jpg 1167 | ISIC_0015956.jpg 1168 | ISIC_0014683.jpg 1169 | ISIC_0011300.jpg 1170 | ISIC_0012654.jpg 1171 | ISIC_0013291.jpg 1172 | 
ISIC_0015607.jpg 1173 | ISIC_0008524.jpg 1174 | ISIC_0010056.jpg 1175 | ISIC_0013795.jpg 1176 | ISIC_0000167.jpg 1177 | ISIC_0013490.jpg 1178 | ISIC_0015485.jpg 1179 | ISIC_0010075.jpg 1180 | ISIC_0013089.jpg 1181 | ISIC_0000540.jpg 1182 | ISIC_0000495.jpg 1183 | ISIC_0011115.jpg 1184 | ISIC_0015211.jpg 1185 | ISIC_0000548.jpg 1186 | ISIC_0009868.jpg 1187 | ISIC_0013600.jpg 1188 | ISIC_0016054.jpg 1189 | ISIC_0000261.jpg 1190 | ISIC_0008396.jpg 1191 | ISIC_0012417.jpg 1192 | ISIC_0013789.jpg 1193 | ISIC_0016072.jpg 1194 | ISIC_0000470.jpg 1195 | ISIC_0013427.jpg 1196 | ISIC_0014605.jpg 1197 | ISIC_0012260.jpg 1198 | ISIC_0015566.jpg 1199 | ISIC_0000355.jpg 1200 | ISIC_0012316.jpg 1201 | ISIC_0012682.jpg 1202 | ISIC_0015193.jpg 1203 | ISIC_0002246.jpg 1204 | ISIC_0013946.jpg 1205 | ISIC_0015563.jpg 1206 | ISIC_0015404.jpg 1207 | ISIC_0003462.jpg 1208 | ISIC_0010006.jpg 1209 | ISIC_0000397.jpg 1210 | ISIC_0000234.jpg 1211 | ISIC_0011097.jpg 1212 | ISIC_0013739.jpg 1213 | ISIC_0015976.jpg 1214 | ISIC_0012473.jpg 1215 | ISIC_0013213.jpg 1216 | ISIC_0000555.jpg 1217 | ISIC_0009935.jpg 1218 | ISIC_0012679.jpg 1219 | ISIC_0010036.jpg 1220 | ISIC_0000317.jpg 1221 | ISIC_0014879.jpg 1222 | ISIC_0016015.jpg 1223 | ISIC_0012159.jpg 1224 | ISIC_0000196.jpg 1225 | ISIC_0014925.jpg 1226 | ISIC_0014927.jpg 1227 | ISIC_0015984.jpg 1228 | ISIC_0013071.jpg 1229 | ISIC_0011118.jpg 1230 | ISIC_0015310.jpg 1231 | ISIC_0012206.jpg 1232 | ISIC_0010321.jpg 1233 | ISIC_0000021.jpg 1234 | ISIC_0009975.jpg 1235 | ISIC_0015218.jpg 1236 | ISIC_0014328.jpg 1237 | ISIC_0012205.jpg 1238 | ISIC_0012232.jpg 1239 | ISIC_0015447.jpg 1240 | ISIC_0012873.jpg 1241 | ISIC_0012737.jpg 1242 | ISIC_0000300.jpg 1243 | ISIC_0016023.jpg 1244 | ISIC_0005787.jpg 1245 | ISIC_0000055.jpg 1246 | ISIC_0009993.jpg 1247 | ISIC_0016022.jpg 1248 | ISIC_0012281.jpg 1249 | ISIC_0000353.jpg 1250 | ISIC_0011151.jpg 1251 | ISIC_0000190.jpg 1252 | ISIC_0012494.jpg 1253 | ISIC_0005639.jpg 1254 | ISIC_0013487.jpg 1255 | 
ISIC_0013842.jpg 1256 | ISIC_0014498.jpg 1257 | ISIC_0012768.jpg 1258 | ISIC_0014316.jpg 1259 | ISIC_0013549.jpg 1260 | ISIC_0011398.jpg 1261 | ISIC_0014685.jpg 1262 | ISIC_0000132.jpg 1263 | ISIC_0010002.jpg 1264 | ISIC_0012804.jpg 1265 | ISIC_0012685.jpg 1266 | ISIC_0013320.jpg 1267 | ISIC_0010053.jpg 1268 | ISIC_0014773.jpg 1269 | ISIC_0000022.jpg 1270 | ISIC_0009990.jpg 1271 | ISIC_0008998.jpg 1272 | ISIC_0012793.jpg 1273 | ISIC_0014619.jpg 1274 | ISIC_0012342.jpg 1275 | ISIC_0013249.jpg 1276 | ISIC_0010604.jpg 1277 | ISIC_0001128.jpg 1278 | ISIC_0014979.jpg 1279 | ISIC_0008236.jpg 1280 | ISIC_0013861.jpg 1281 | ISIC_0014349.jpg 1282 | ISIC_0000537.jpg 1283 | ISIC_0010329.jpg 1284 | ISIC_0007241.jpg 1285 | ISIC_0011329.jpg 1286 | ISIC_0013126.jpg 1287 | ISIC_0000220.jpg 1288 | ISIC_0000058.jpg 1289 | ISIC_0000173.jpg 1290 | ISIC_0012191.jpg 1291 | ISIC_0014901.jpg 1292 | ISIC_0015582.jpg 1293 | ISIC_0011162.jpg 1294 | ISIC_0014787.jpg 1295 | ISIC_0010365.jpg 1296 | ISIC_0004346.jpg 1297 | ISIC_0011099.jpg 1298 | ISIC_0013109.jpg 1299 | ISIC_0014596.jpg 1300 | ISIC_0011390.jpg 1301 | ISIC_0014753.jpg 1302 | ISIC_0000205.jpg 1303 | ISIC_0014947.jpg 1304 | ISIC_0013359.jpg 1305 | ISIC_0013385.jpg 1306 | ISIC_0014755.jpg 1307 | ISIC_0013257.jpg 1308 | ISIC_0000212.jpg 1309 | ISIC_0016055.jpg 1310 | ISIC_0011218.jpg 1311 | ISIC_0010463.jpg 1312 | ISIC_0013163.jpg 1313 | ISIC_0000129.jpg 1314 | ISIC_0000050.jpg 1315 | ISIC_0013562.jpg 1316 | ISIC_0000231.jpg 1317 | ISIC_0013204.jpg 1318 | ISIC_0014522.jpg 1319 | ISIC_0010028.jpg 1320 | ISIC_0002353.jpg 1321 | ISIC_0009898.jpg 1322 | ISIC_0000153.jpg 1323 | ISIC_0001267.jpg 1324 | ISIC_0010592.jpg 1325 | ISIC_0011081.jpg 1326 | ISIC_0011343.jpg 1327 | ISIC_0009800.jpg 1328 | ISIC_0015991.jpg 1329 | ISIC_0014327.jpg 1330 | ISIC_0015445.jpg 1331 | ISIC_0001119.jpg 1332 | ISIC_0012976.jpg 1333 | ISIC_0010241.jpg 1334 | ISIC_0009910.jpg 1335 | ISIC_0000543.jpg 1336 | ISIC_0014862.jpg 1337 | ISIC_0009941.jpg 1338 | 
ISIC_0012939.jpg 1339 | ISIC_0015013.jpg 1340 | ISIC_0010844.jpg 1341 | ISIC_0016034.jpg 1342 | ISIC_0009994.jpg 1343 | ISIC_0015274.jpg 1344 | ISIC_0000028.jpg 1345 | ISIC_0014069.jpg 1346 | ISIC_0002836.jpg 1347 | ISIC_0013096.jpg 1348 | ISIC_0014778.jpg 1349 | ISIC_0012547.jpg 1350 | ISIC_0010014.jpg 1351 | ISIC_0009934.jpg 1352 | ISIC_0015972.jpg 1353 | ISIC_0013865.jpg 1354 | ISIC_0013084.jpg 1355 | ISIC_0011303.jpg 1356 | ISIC_0014291.jpg 1357 | ISIC_0014948.jpg 1358 | ISIC_0000098.jpg 1359 | ISIC_0010018.jpg 1360 | ISIC_0010447.jpg 1361 | ISIC_0014073.jpg 1362 | ISIC_0010585.jpg 1363 | ISIC_0000269.jpg 1364 | ISIC_0009938.jpg 1365 | ISIC_0014912.jpg 1366 | ISIC_0014117.jpg 1367 | ISIC_0014103.jpg 1368 | ISIC_0013329.jpg 1369 | ISIC_0012135.jpg 1370 | ISIC_0000157.jpg 1371 | ISIC_0000340.jpg 1372 | ISIC_0015206.jpg 1373 | ISIC_0015937.jpg 1374 | ISIC_0015079.jpg 1375 | ISIC_0010320.jpg 1376 | ISIC_0001852.jpg 1377 | ISIC_0000062.jpg 1378 | ISIC_0012664.jpg 1379 | ISIC_0011294.jpg 1380 | ISIC_0010462.jpg 1381 | ISIC_0013170.jpg 1382 | ISIC_0010249.jpg 1383 | ISIC_0015625.jpg 1384 | ISIC_0016041.jpg 1385 | ISIC_0015985.jpg 1386 | ISIC_0000489.jpg 1387 | ISIC_0000342.jpg 1388 | ISIC_0009880.jpg 1389 | ISIC_0012961.jpg 1390 | ISIC_0014028.jpg 1391 | ISIC_0011324.jpg 1392 | ISIC_0010029.jpg 1393 | ISIC_0000999.jpg 1394 | ISIC_0014386.jpg 1395 | ISIC_0013230.jpg 1396 | ISIC_0006671.jpg 1397 | ISIC_0011137.jpg 1398 | ISIC_0013839.jpg 1399 | ISIC_0013488.jpg 1400 | ISIC_0012268.jpg 1401 | ISIC_0013472.jpg 1402 | ISIC_0014164.jpg 1403 | ISIC_0012741.jpg 1404 | ISIC_0014795.jpg 1405 | ISIC_0013888.jpg 1406 | ISIC_0012655.jpg 1407 | ISIC_0012902.jpg 1408 | ISIC_0014639.jpg 1409 | ISIC_0015258.jpg 1410 | ISIC_0009896.jpg 1411 | ISIC_0012840.jpg 1412 | ISIC_0009956.jpg 1413 | ISIC_0000171.jpg 1414 | ISIC_0014110.jpg 1415 | ISIC_0000053.jpg 1416 | ISIC_0014185.jpg 1417 | ISIC_0000041.jpg 1418 | ISIC_0001190.jpg 1419 | ISIC_0014217.jpg 1420 | ISIC_0013205.jpg 1421 | 
ISIC_0000343.jpg 1422 | ISIC_0013886.jpg 1423 | ISIC_0014682.jpg 1424 | ISIC_0013121.jpg 1425 | ISIC_0010066.jpg 1426 | ISIC_0013982.jpg 1427 | ISIC_0014454.jpg 1428 | ISIC_0000048.jpg 1429 | ISIC_0013330.jpg 1430 | ISIC_0014500.jpg 1431 | ISIC_0014572.jpg 1432 | ISIC_0015174.jpg 1433 | ISIC_0013164.jpg 1434 | ISIC_0014666.jpg 1435 | ISIC_0016016.jpg 1436 | ISIC_0012285.jpg 1437 | ISIC_0000253.jpg 1438 | ISIC_0014985.jpg 1439 | ISIC_0013967.jpg 1440 | ISIC_0011360.jpg 1441 | ISIC_0015638.jpg 1442 | ISIC_0000222.jpg 1443 | ISIC_0000101.jpg 1444 | ISIC_0012160.jpg 1445 | ISIC_0014735.jpg 1446 | ISIC_0013458.jpg 1447 | ISIC_0000163.jpg 1448 | ISIC_0000314.jpg 1449 | ISIC_0011161.jpg 1450 | ISIC_0013405.jpg 1451 | ISIC_0000038.jpg 1452 | ISIC_0011372.jpg 1453 | ISIC_0010175.jpg 1454 | ISIC_0013578.jpg 1455 | ISIC_0010439.jpg 1456 | ISIC_0012143.jpg 1457 | ISIC_0000017.jpg 1458 | ISIC_0013672.jpg 1459 | ISIC_0013615.jpg 1460 | ISIC_0000320.jpg 1461 | ISIC_0013340.jpg 1462 | ISIC_0014962.jpg 1463 | ISIC_0015050.jpg 1464 | ISIC_0014529.jpg 1465 | ISIC_0013229.jpg 1466 | ISIC_0013597.jpg 1467 | ISIC_0000020.jpg 1468 | ISIC_0013035.jpg 1469 | ISIC_0011165.jpg 1470 | ISIC_0012450.jpg 1471 | ISIC_0014253.jpg 1472 | ISIC_0012235.jpg 1473 | ISIC_0000279.jpg 1474 | ISIC_0010057.jpg 1475 | ISIC_0014826.jpg 1476 | ISIC_0012374.jpg 1477 | ISIC_0000077.jpg 1478 | ISIC_0010078.jpg 1479 | ISIC_0013219.jpg 1480 | ISIC_0012657.jpg 1481 | ISIC_0014013.jpg 1482 | ISIC_0015455.jpg 1483 | ISIC_0006776.jpg 1484 | ISIC_0013678.jpg 1485 | ISIC_0013936.jpg 1486 | ISIC_0014151.jpg 1487 | ISIC_0013501.jpg 1488 | ISIC_0000073.jpg 1489 | ISIC_0008403.jpg 1490 | ISIC_0000534.jpg 1491 | ISIC_0010058.jpg 1492 | ISIC_0012898.jpg 1493 | ISIC_0013799.jpg 1494 | ISIC_0015264.jpg 1495 | ISIC_0010222.jpg 1496 | ISIC_0015942.jpg 1497 | ISIC_0013603.jpg 1498 | ISIC_0000138.jpg 1499 | ISIC_0000539.jpg 1500 | ISIC_0015950.jpg 1501 | ISIC_0015943.jpg 1502 | ISIC_0013086.jpg 1503 | ISIC_0001216.jpg 1504 | 
ISIC_0010174.jpg 1505 | ISIC_0009947.jpg 1506 | ISIC_0000331.jpg 1507 | ISIC_0013517.jpg 1508 | ISIC_0001275.jpg 1509 | ISIC_0000229.jpg 1510 | ISIC_0009583.jpg 1511 | ISIC_0011110.jpg 1512 | ISIC_0000251.jpg 1513 | ISIC_0015229.jpg 1514 | ISIC_0014336.jpg 1515 | ISIC_0010225.jpg 1516 | ISIC_0013203.jpg 1517 | ISIC_0010067.jpg 1518 | ISIC_0000421.jpg 1519 | ISIC_0013201.jpg 1520 | ISIC_0002251.jpg 1521 | ISIC_0000385.jpg 1522 | ISIC_0014480.jpg 1523 | ISIC_0000000.jpg 1524 | ISIC_0010071.jpg 1525 | ISIC_0012930.jpg 1526 | ISIC_0014806.jpg 1527 | ISIC_0005548.jpg 1528 | ISIC_0015957.jpg 1529 | ISIC_0000169.jpg 1530 | ISIC_0013232.jpg 1531 | ISIC_0010010.jpg 1532 | ISIC_0012659.jpg 1533 | ISIC_0014094.jpg 1534 | ISIC_0001299.jpg 1535 | ISIC_0012390.jpg 1536 | ISIC_0000161.jpg 1537 | ISIC_0009885.jpg 1538 | ISIC_0015202.jpg 1539 | ISIC_0014930.jpg 1540 | ISIC_0000043.jpg 1541 | ISIC_0012372.jpg 1542 | ISIC_0013275.jpg 1543 | ISIC_0013684.jpg 1544 | ISIC_0014684.jpg 1545 | ISIC_0010194.jpg 1546 | ISIC_0016009.jpg 1547 | ISIC_0014779.jpg 1548 | ISIC_0010317.jpg 1549 | ISIC_0000018.jpg 1550 | ISIC_0014026.jpg 1551 | ISIC_0014080.jpg 1552 | ISIC_0012288.jpg 1553 | ISIC_0014567.jpg 1554 | ISIC_0011327.jpg 1555 | ISIC_0002439.jpg 1556 | ISIC_0000131.jpg 1557 | ISIC_0015179.jpg 1558 | ISIC_0015960.jpg 1559 | ISIC_0016051.jpg 1560 | ISIC_0014796.jpg 1561 | ISIC_0010063.jpg 1562 | ISIC_0010191.jpg 1563 | ISIC_0012177.jpg 1564 | ISIC_0010060.jpg 1565 | ISIC_0014353.jpg 1566 | ISIC_0013559.jpg 1567 | ISIC_0012740.jpg 1568 | ISIC_0000477.jpg 1569 | ISIC_0000001.jpg 1570 | ISIC_0010322.jpg 1571 | ISIC_0014643.jpg 1572 | ISIC_0011144.jpg 1573 | ISIC_0016005.jpg 1574 | ISIC_0010022.jpg 1575 | ISIC_0013782.jpg 1576 | ISIC_0016012.jpg 1577 | ISIC_0009937.jpg 1578 | ISIC_0013617.jpg 1579 | ISIC_0013573.jpg 1580 | ISIC_0014394.jpg 1581 | ISIC_0015005.jpg 1582 | ISIC_0014725.jpg 1583 | ISIC_0013674.jpg 1584 | ISIC_0016006.jpg 1585 | ISIC_0000025.jpg 1586 | ISIC_0009298.jpg 1587 | 
ISIC_0002879.jpg 1588 | ISIC_0013269.jpg 1589 | ISIC_0014623.jpg 1590 | ISIC_0015007.jpg 1591 | ISIC_0010020.jpg 1592 | ISIC_0000546.jpg 1593 | ISIC_0012969.jpg 1594 | ISIC_0001181.jpg 1595 | ISIC_0013346.jpg 1596 | ISIC_0012720.jpg 1597 | ISIC_0009897.jpg 1598 | ISIC_0000088.jpg 1599 | ISIC_0012758.jpg 1600 | ISIC_0014714.jpg 1601 | ISIC_0013207.jpg 1602 | ISIC_0015237.jpg 1603 | ISIC_0013473.jpg 1604 | ISIC_0014680.jpg 1605 | ISIC_0013972.jpg 1606 | ISIC_0000052.jpg 1607 | ISIC_0000536.jpg 1608 | ISIC_0000482.jpg 1609 | ISIC_0000060.jpg 1610 | ISIC_0013918.jpg 1611 | ISIC_0000293.jpg 1612 | ISIC_0012223.jpg 1613 | ISIC_0016060.jpg 1614 | ISIC_0014526.jpg 1615 | ISIC_0008507.jpg 1616 | ISIC_0015155.jpg 1617 | ISIC_0013461.jpg 1618 | ISIC_0013843.jpg 1619 | ISIC_0014286.jpg 1620 | ISIC_0012107.jpg 1621 | ISIC_0012675.jpg 1622 | ISIC_0012348.jpg 1623 | ISIC_0009891.jpg 1624 | ISIC_0014044.jpg 1625 | ISIC_0014749.jpg 1626 | ISIC_0009923.jpg 1627 | ISIC_0000479.jpg 1628 | ISIC_0015990.jpg 1629 | ISIC_0014357.jpg 1630 | ISIC_0014836.jpg 1631 | ISIC_0013242.jpg 1632 | ISIC_0014854.jpg 1633 | ISIC_0009932.jpg 1634 | ISIC_0000149.jpg 1635 | ISIC_0006711.jpg 1636 | ISIC_0015980.jpg 1637 | ISIC_0000427.jpg 1638 | ISIC_0016059.jpg 1639 | ISIC_0012376.jpg 1640 | ISIC_0016000.jpg 1641 | ISIC_0011177.jpg 1642 | ISIC_0000186.jpg 1643 | ISIC_0011220.jpg 1644 | ISIC_0012201.jpg 1645 | ISIC_0010336.jpg 1646 | ISIC_0013529.jpg 1647 | ISIC_0014707.jpg 1648 | ISIC_0013762.jpg 1649 | ISIC_0000514.jpg 1650 | ISIC_0000156.jpg 1651 | ISIC_0009888.jpg 1652 | ISIC_0014957.jpg 1653 | ISIC_0011357.jpg 1654 | ISIC_0016044.jpg 1655 | ISIC_0015940.jpg 1656 | ISIC_0016057.jpg 1657 | ISIC_0015170.jpg 1658 | ISIC_0011204.jpg 1659 | ISIC_0016019.jpg 1660 | ISIC_0000552.jpg 1661 | ISIC_0000236.jpg 1662 | ISIC_0000338.jpg 1663 | ISIC_0013998.jpg 1664 | ISIC_0014458.jpg 1665 | ISIC_0009899.jpg 1666 | ISIC_0001188.jpg 1667 | ISIC_0013643.jpg 1668 | ISIC_0014624.jpg 1669 | ISIC_0011130.jpg 1670 | 
ISIC_0003657.jpg 1671 | ISIC_0014583.jpg 1672 | ISIC_0000054.jpg 1673 | ISIC_0013634.jpg 1674 | ISIC_0009964.jpg 1675 | ISIC_0000024.jpg 1676 | ISIC_0009944.jpg 1677 | ISIC_0000154.jpg 1678 | ISIC_0000079.jpg 1679 | ISIC_0011359.jpg 1680 | ISIC_0000485.jpg 1681 | ISIC_0012749.jpg 1682 | ISIC_0009919.jpg 1683 | ISIC_0000487.jpg 1684 | ISIC_0013737.jpg 1685 | ISIC_0001427.jpg 1686 | ISIC_0013777.jpg 1687 | ISIC_0011169.jpg 1688 | ISIC_0000258.jpg 1689 | ISIC_0014559.jpg 1690 | ISIC_0014703.jpg 1691 | ISIC_0013610.jpg 1692 | ISIC_0014926.jpg 1693 | ISIC_0012425.jpg 1694 | ISIC_0015312.jpg 1695 | ISIC_0014092.jpg 1696 | ISIC_0002673.jpg 1697 | ISIC_0011227.jpg 1698 | ISIC_0015939.jpg 1699 | ISIC_0001184.jpg 1700 | ISIC_0010257.jpg 1701 | ISIC_0013527.jpg 1702 | ISIC_0000271.jpg 1703 | ISIC_0010479.jpg 1704 | ISIC_0010262.jpg 1705 | ISIC_0015149.jpg 1706 | ISIC_0000047.jpg 1707 | ISIC_0013766.jpg 1708 | ISIC_0002948.jpg 1709 | ISIC_0013474.jpg 1710 | ISIC_0013335.jpg 1711 | ISIC_0000377.jpg 1712 | ISIC_0013929.jpg 1713 | ISIC_0012520.jpg 1714 | ISIC_0011208.jpg 1715 | ISIC_0010204.jpg 1716 | ISIC_0013044.jpg 1717 | ISIC_0006800.jpg 1718 | ISIC_0014808.jpg 1719 | ISIC_0015157.jpg 1720 | ISIC_0010368.jpg 1721 | ISIC_0012224.jpg 1722 | ISIC_0012489.jpg 1723 | ISIC_0010590.jpg 1724 | ISIC_0015057.jpg 1725 | ISIC_0015127.jpg 1726 | ISIC_0012965.jpg 1727 | ISIC_0012550.jpg 1728 | ISIC_0012713.jpg 1729 | ISIC_0010581.jpg 1730 | ISIC_0015233.jpg 1731 | ISIC_0000235.jpg 1732 | ISIC_0014940.jpg 1733 | ISIC_0012432.jpg 1734 | ISIC_0012666.jpg 1735 | ISIC_0015003.jpg 1736 | ISIC_0012261.jpg 1737 | ISIC_0011361.jpg 1738 | ISIC_0012094.jpg 1739 | ISIC_0013908.jpg 1740 | ISIC_0000461.jpg 1741 | ISIC_0013817.jpg 1742 | ISIC_0010178.jpg 1743 | ISIC_0015256.jpg 1744 | ISIC_0000255.jpg 1745 | ISIC_0015936.jpg 1746 | ISIC_0014311.jpg 1747 | ISIC_0012539.jpg 1748 | ISIC_0000507.jpg 1749 | ISIC_0002287.jpg 1750 | ISIC_0000395.jpg 1751 | ISIC_0000328.jpg 1752 | ISIC_0012770.jpg 1753 | 
ISIC_0000484.jpg 1754 | ISIC_0014599.jpg 1755 | ISIC_0013626.jpg 1756 | ISIC_0000325.jpg 1757 | ISIC_0010093.jpg 1758 | ISIC_0012544.jpg 1759 | ISIC_0011362.jpg 1760 | ISIC_0011344.jpg 1761 | ISIC_0013190.jpg 1762 | ISIC_0013177.jpg 1763 | ISIC_0015412.jpg 1764 | ISIC_0003056.jpg 1765 | ISIC_0012182.jpg 1766 | ISIC_0016013.jpg 1767 | ISIC_0014302.jpg 1768 | ISIC_0000051.jpg 1769 | ISIC_0013758.jpg 1770 | ISIC_0013434.jpg 1771 | ISIC_0000523.jpg 1772 | ISIC_0013208.jpg 1773 | ISIC_0014090.jpg 1774 | ISIC_0012105.jpg 1775 | ISIC_0007475.jpg 1776 | ISIC_0000184.jpg 1777 | ISIC_0014392.jpg 1778 | ISIC_0014397.jpg 1779 | ISIC_0012445.jpg 1780 | ISIC_0000188.jpg 1781 | ISIC_0015331.jpg 1782 | ISIC_0013424.jpg 1783 | ISIC_0010185.jpg 1784 | ISIC_0015156.jpg 1785 | ISIC_0010465.jpg 1786 | ISIC_0010092.jpg 1787 | ISIC_0008029.jpg 1788 | ISIC_0000115.jpg 1789 | ISIC_0012273.jpg 1790 | ISIC_0013499.jpg 1791 | ISIC_0010473.jpg 1792 | ISIC_0013090.jpg 1793 | ISIC_0015330.jpg 1794 | ISIC_0011155.jpg 1795 | ISIC_0000228.jpg 1796 | ISIC_0014772.jpg 1797 | ISIC_0010330.jpg 1798 | ISIC_0011117.jpg 1799 | ISIC_0001118.jpg 1800 | ISIC_0015241.jpg 1801 | ISIC_0013844.jpg 1802 | ISIC_0015035.jpg 1803 | ISIC_0007141.jpg 1804 | ISIC_0016040.jpg 1805 | ISIC_0010572.jpg 1806 | ISIC_0013150.jpg 1807 | ISIC_0000023.jpg 1808 | ISIC_0009901.jpg 1809 | ISIC_0000080.jpg 1810 | ISIC_0012250.jpg 1811 | ISIC_0014438.jpg 1812 | ISIC_0001134.jpg 1813 | ISIC_0014365.jpg 1814 | ISIC_0007528.jpg 1815 | ISIC_0015115.jpg 1816 | ISIC_0013592.jpg 1817 | -------------------------------------------------------------------------------- /Datasets/validation.list: -------------------------------------------------------------------------------- 1 | ISIC_0013897.jpg 2 | ISIC_0001103.jpg 3 | ISIC_0013224.jpg 4 | ISIC_0011168.jpg 5 | ISIC_0011317.jpg 6 | ISIC_0012245.jpg 7 | ISIC_0010552.jpg 8 | ISIC_0016029.jpg 9 | ISIC_0014642.jpg 10 | ISIC_0012325.jpg 11 | ISIC_0005000.jpg 12 | ISIC_0000392.jpg 13 | 
ISIC_0000100.jpg 14 | ISIC_0015436.jpg 15 | ISIC_0013094.jpg 16 | ISIC_0014975.jpg 17 | ISIC_0009927.jpg 18 | ISIC_0010849.jpg 19 | ISIC_0012149.jpg 20 | ISIC_0014920.jpg 21 | ISIC_0000199.jpg 22 | ISIC_0007038.jpg 23 | ISIC_0011119.jpg 24 | ISIC_0014938.jpg 25 | ISIC_0012178.jpg 26 | ISIC_0015051.jpg 27 | ISIC_0012549.jpg 28 | ISIC_0010435.jpg 29 | ISIC_0011397.jpg 30 | ISIC_0014846.jpg 31 | ISIC_0014863.jpg 32 | ISIC_0013671.jpg 33 | ISIC_0012715.jpg 34 | ISIC_0014961.jpg 35 | ISIC_0010847.jpg 36 | ISIC_0012284.jpg 37 | ISIC_0014754.jpg 38 | ISIC_0014745.jpg 39 | ISIC_0012704.jpg 40 | ISIC_0014032.jpg 41 | ISIC_0010102.jpg 42 | ISIC_0012503.jpg 43 | ISIC_0000035.jpg 44 | ISIC_0013500.jpg 45 | ISIC_0015118.jpg 46 | ISIC_0013925.jpg 47 | ISIC_0000197.jpg 48 | ISIC_0010207.jpg 49 | ISIC_0000259.jpg 50 | ISIC_0012702.jpg 51 | ISIC_0010498.jpg 52 | ISIC_0012789.jpg 53 | ISIC_0014149.jpg 54 | ISIC_0000185.jpg 55 | ISIC_0000290.jpg 56 | ISIC_0011374.jpg 57 | ISIC_0012671.jpg 58 | ISIC_0010862.jpg 59 | ISIC_0000311.jpg 60 | ISIC_0000277.jpg 61 | ISIC_0009969.jpg 62 | ISIC_0014609.jpg 63 | ISIC_0009979.jpg 64 | ISIC_0014178.jpg 65 | ISIC_0000252.jpg 66 | ISIC_0000544.jpg 67 | ISIC_0010190.jpg 68 | ISIC_0000504.jpg 69 | ISIC_0013719.jpg 70 | ISIC_0016014.jpg 71 | ISIC_0015158.jpg 72 | ISIC_0011229.jpg 73 | ISIC_0012487.jpg 74 | ISIC_0012391.jpg 75 | ISIC_0011123.jpg 76 | ISIC_0000357.jpg 77 | ISIC_0015140.jpg 78 | ISIC_0011140.jpg 79 | ISIC_0012156.jpg 80 | ISIC_0016017.jpg 81 | ISIC_0010252.jpg 82 | ISIC_0010848.jpg 83 | ISIC_0014792.jpg 84 | ISIC_0013128.jpg 85 | ISIC_0011105.jpg 86 | ISIC_0014667.jpg 87 | ISIC_0009998.jpg 88 | ISIC_0013146.jpg 89 | ISIC_0010367.jpg 90 | ISIC_0014797.jpg 91 | ISIC_0015260.jpg 92 | ISIC_0016033.jpg 93 | ISIC_0000488.jpg 94 | ISIC_0015645.jpg 95 | ISIC_0010229.jpg 96 | ISIC_0009978.jpg 97 | ISIC_0012662.jpg 98 | ISIC_0000247.jpg 99 | ISIC_0011150.jpg 100 | ISIC_0012673.jpg 101 | ISIC_0014093.jpg 102 | ISIC_0001262.jpg 103 | 
ISIC_0000143.jpg 104 | ISIC_0010593.jpg 105 | ISIC_0009972.jpg 106 | ISIC_0012484.jpg 107 | ISIC_0013863.jpg 108 | ISIC_0001442.jpg 109 | ISIC_0012836.jpg 110 | ISIC_0006940.jpg 111 | ISIC_0014911.jpg 112 | ISIC_0016004.jpg 113 | ISIC_0013315.jpg 114 | ISIC_0013394.jpg 115 | ISIC_0003174.jpg 116 | ISIC_0013054.jpg 117 | ISIC_0010251.jpg 118 | ISIC_0000027.jpg 119 | ISIC_0014688.jpg 120 | ISIC_0005555.jpg 121 | ISIC_0012208.jpg 122 | ISIC_0009883.jpg 123 | ISIC_0000278.jpg 124 | ISIC_0010263.jpg 125 | ISIC_0013039.jpg 126 | ISIC_0014325.jpg 127 | ISIC_0014891.jpg 128 | ISIC_0009201.jpg 129 | ISIC_0002489.jpg 130 | ISIC_0007344.jpg 131 | ISIC_0010189.jpg 132 | ISIC_0013579.jpg 133 | ISIC_0000503.jpg 134 | ISIC_0014157.jpg 135 | ISIC_0016026.jpg 136 | ISIC_0000172.jpg 137 | ISIC_0014675.jpg 138 | ISIC_0013977.jpg 139 | ISIC_0010598.jpg 140 | ISIC_0000201.jpg 141 | ISIC_0015124.jpg 142 | ISIC_0013554.jpg 143 | ISIC_0008626.jpg 144 | ISIC_0013637.jpg 145 | ISIC_0014951.jpg 146 | ISIC_0000370.jpg 147 | ISIC_0013690.jpg 148 | ISIC_0001185.jpg 149 | ISIC_0000086.jpg 150 | ISIC_0000075.jpg 151 | ISIC_0012835.jpg 152 | ISIC_0013813.jpg 153 | ISIC_0014783.jpg 154 | ISIC_0001191.jpg 155 | ISIC_0013397.jpg 156 | ISIC_0013416.jpg 157 | ISIC_0001372.jpg 158 | ISIC_0005187.jpg 159 | ISIC_0015102.jpg 160 | ISIC_0004309.jpg 161 | ISIC_0002780.jpg 162 | ISIC_0014324.jpg 163 | ISIC_0015129.jpg 164 | ISIC_0001960.jpg 165 | ISIC_0011225.jpg 166 | ISIC_0010459.jpg 167 | ISIC_0000246.jpg 168 | ISIC_0011173.jpg 169 | ISIC_0010568.jpg 170 | ISIC_0010553.jpg 171 | ISIC_0014573.jpg 172 | ISIC_0013712.jpg 173 | ISIC_0015944.jpg 174 | ISIC_0013512.jpg 175 | ISIC_0013652.jpg 176 | ISIC_0000232.jpg 177 | ISIC_0015999.jpg 178 | ISIC_0014587.jpg 179 | ISIC_0015062.jpg 180 | ISIC_0013922.jpg 181 | ISIC_0000240.jpg 182 | ISIC_0011128.jpg 183 | ISIC_0000102.jpg 184 | ISIC_0010077.jpg 185 | ISIC_0000393.jpg 186 | ISIC_0000426.jpg 187 | ISIC_0012653.jpg 188 | ISIC_0014610.jpg 189 | ISIC_0013836.jpg 190 
| ISIC_0000200.jpg 191 | ISIC_0010043.jpg 192 | ISIC_0011352.jpg 193 | ISIC_0014936.jpg 194 | ISIC_0014289.jpg 195 | ISIC_0012351.jpg 196 | ISIC_0015952.jpg 197 | ISIC_0012246.jpg 198 | ISIC_0014974.jpg 199 | ISIC_0014790.jpg 200 | ISIC_0015273.jpg 201 | ISIC_0014838.jpg 202 | ISIC_0014748.jpg 203 | ISIC_0012216.jpg 204 | ISIC_0015283.jpg 205 | ISIC_0012335.jpg 206 | ISIC_0014385.jpg 207 | ISIC_0014174.jpg 208 | ISIC_0010605.jpg 209 | ISIC_0015617.jpg 210 | ISIC_0015593.jpg 211 | ISIC_0000007.jpg 212 | ISIC_0012516.jpg 213 | ISIC_0013663.jpg 214 | ISIC_0013056.jpg 215 | ISIC_0000350.jpg 216 | ISIC_0013835.jpg 217 | ISIC_0012203.jpg 218 | ISIC_0015419.jpg 219 | ISIC_0000367.jpg 220 | ISIC_0012447.jpg 221 | ISIC_0010062.jpg 222 | ISIC_0009987.jpg 223 | ISIC_0013072.jpg 224 | ISIC_0013302.jpg 225 | ISIC_0010347.jpg 226 | ISIC_0000292.jpg 227 | ISIC_0011079.jpg 228 | ISIC_0000532.jpg 229 | ISIC_0013341.jpg 230 | ISIC_0003805.jpg 231 | ISIC_0000455.jpg 232 | ISIC_0013962.jpg 233 | ISIC_0012253.jpg 234 | ISIC_0011083.jpg 235 | ISIC_0000444.jpg 236 | ISIC_0000227.jpg 237 | ISIC_0014958.jpg 238 | ISIC_0014746.jpg 239 | ISIC_0011124.jpg 240 | ISIC_0014963.jpg 241 | ISIC_0013173.jpg 242 | ISIC_0010554.jpg 243 | ISIC_0010021.jpg 244 | ISIC_0009955.jpg 245 | ISIC_0015989.jpg 246 | ISIC_0014229.jpg 247 | ISIC_0013816.jpg 248 | ISIC_0010005.jpg 249 | ISIC_0000136.jpg 250 | ISIC_0010247.jpg 251 | ISIC_0013709.jpg 252 | ISIC_0012823.jpg 253 | ISIC_0013456.jpg 254 | ISIC_0014001.jpg 255 | ISIC_0016003.jpg 256 | ISIC_0010364.jpg 257 | ISIC_0012495.jpg 258 | ISIC_0016068.jpg 259 | ISIC_0013399.jpg 260 | ISIC_0000085.jpg 261 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # EIU-Net 2 | 3 | EIU-Net: Enhanced Feature Extraction and Improved Skip Connections in U-Net For Skin Lesion Segmentation 4 | 
# --- blocks/ASPP.py --------------------------------------------------------
from torch import nn
import torch
import torch.nn.functional as F


class ASPPConv(nn.Sequential):
    """One ASPP branch: 3x3 dilated conv -> BatchNorm -> ReLU6."""

    def __init__(self, in_channels, out_channels, dilation):
        # padding == dilation keeps the spatial size unchanged for a 3x3 kernel.
        super(ASPPConv, self).__init__(
            nn.Conv2d(in_channels, out_channels, 3, padding=dilation,
                      dilation=dilation, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU6(),
        )


class ASPPPooling(nn.Sequential):
    """Image-level ASPP branch: global average pool -> 1x1 conv -> upsample."""

    def __init__(self, in_channels, out_channels):
        super(ASPPPooling, self).__init__(
            nn.AdaptiveAvgPool2d(1),
            nn.Conv2d(in_channels, out_channels, 1, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU6(),
        )

    def forward(self, x):
        size = x.shape[-2:]
        x = super(ASPPPooling, self).forward(x)
        # Restore the input resolution so this branch can be concatenated
        # with the convolutional branches.
        return F.interpolate(x, size=size, mode='bilinear', align_corners=False)


class ASPP(nn.Module):
    """Atrous Spatial Pyramid Pooling.

    Runs a 1x1-conv branch, one dilated 3x3 branch per rate in
    ``atrous_rates`` and one image-pooling branch in parallel, then fuses
    them with a 1x1 projection followed by dropout.

    Args:
        in_channels: channels of the input feature map.
        atrous_rates: iterable of dilation rates (any length >= 1; the
            original code required exactly three).
        out_channels: channels produced by every branch and by the final
            projection. Defaults to 512, the value previously hard-coded.
    """

    def __init__(self, in_channels, atrous_rates, out_channels=512):
        super(ASPP, self).__init__()
        rates = tuple(atrous_rates)
        modules = [nn.Sequential(
            nn.Conv2d(in_channels, out_channels, 1, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU6())]
        modules.extend(ASPPConv(in_channels, out_channels, rate) for rate in rates)
        modules.append(ASPPPooling(in_channels, out_channels))
        self.convs = nn.ModuleList(modules)
        # Fuse: one 1x1 branch + len(rates) dilated branches + pooling branch.
        self.project = nn.Sequential(
            nn.Conv2d((len(rates) + 2) * out_channels, out_channels, 1, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU6(),
            nn.Dropout(0.5))

    def forward(self, x):
        # Evaluate every branch on the same input and fuse along channels.
        res = torch.cat([conv(x) for conv in self.convs], dim=1)
        return self.project(res)


# --- blocks/EPSA.py --------------------------------------------------------
import torch
import torch.nn as nn


class SEWeightModule(nn.Module):
    """Squeeze-and-Excitation: produce a per-channel gate in (0, 1)."""

    def __init__(self, channels, reduction=16):
        super(SEWeightModule, self).__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc1 = nn.Conv2d(channels, channels // reduction, kernel_size=1, padding=0)
        self.relu = nn.ReLU(inplace=True)
        self.fc2 = nn.Conv2d(channels // reduction, channels, kernel_size=1, padding=0)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        # squeeze (global average pool) -> excite (bottleneck MLP) -> gate.
        out = self.avg_pool(x)
        out = self.fc1(out)
        out = self.relu(out)
        out = self.fc2(out)
        weight = self.sigmoid(out)
        return weight


def conv(in_planes, out_planes, kernel_size=3, stride=1, padding=1, dilation=1, groups=1):
    """standard convolution with padding"""
    return nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride,
                     padding=padding, dilation=dilation, groups=groups, bias=False)


def conv1x1(in_planes, out_planes, stride=1):
    """1x1 convolution"""
    return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)


class PSAModule(nn.Module):
    """Pyramid Split Attention (EPSANet).

    NOTE(review): this class continues in the next segment of this flattened
    dump; only the visible head is reproduced here.
    """

    def __init__(self, inplans, planes, conv_kernels=[3, 5, 7, 9], stride=1, conv_groups=[1, 4, 8, 16]):
        super(PSAModule, self).__init__()
        # One branch per kernel size; padding = k // 2 keeps the spatial size.
        self.conv_1 = conv(inplans, planes // 4, kernel_size=conv_kernels[0], padding=conv_kernels[0] // 2,
                           stride=stride, groups=conv_groups[0])
        self.conv_2 = conv(inplans, planes // 4, kernel_size=conv_kernels[1], padding=conv_kernels[1] // 2,
                           stride=stride, groups=conv_groups[1])
        self.conv_3 = conv(inplans, planes // 4, kernel_size=conv_kernels[2], padding=conv_kernels[2] // 2,
                           stride=stride, groups=conv_groups[2])
# --- blocks/EPSA.py (continued) --------------------------------------------
# The flattened dump splits blocks/EPSA.py across segments; the helpers and
# the PSAModule class are restated here in full so this segment stands alone.
import torch
import torch.nn as nn


def conv(in_planes, out_planes, kernel_size=3, stride=1, padding=1, dilation=1, groups=1):
    """standard convolution with padding"""
    return nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride,
                     padding=padding, dilation=dilation, groups=groups, bias=False)


def conv1x1(in_planes, out_planes, stride=1):
    """1x1 convolution"""
    return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)


class SEWeightModule(nn.Module):
    """Squeeze-and-Excitation: produce a per-channel gate in (0, 1)."""

    def __init__(self, channels, reduction=16):
        super(SEWeightModule, self).__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc1 = nn.Conv2d(channels, channels // reduction, kernel_size=1, padding=0)
        self.relu = nn.ReLU(inplace=True)
        self.fc2 = nn.Conv2d(channels // reduction, channels, kernel_size=1, padding=0)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        out = self.avg_pool(x)
        out = self.fc1(out)
        out = self.relu(out)
        out = self.fc2(out)
        return self.sigmoid(out)


class PSAModule(nn.Module):
    """Pyramid Split Attention (EPSANet): four parallel grouped convolutions
    with different kernel sizes, re-weighted by SE gates that are
    softmax-normalised across the four scale branches."""

    def __init__(self, inplans, planes, conv_kernels=[3, 5, 7, 9], stride=1, conv_groups=[1, 4, 8, 16]):
        super(PSAModule, self).__init__()
        # One branch per kernel size; padding = k // 2 keeps the spatial size.
        self.conv_1 = conv(inplans, planes // 4, kernel_size=conv_kernels[0], padding=conv_kernels[0] // 2,
                           stride=stride, groups=conv_groups[0])
        self.conv_2 = conv(inplans, planes // 4, kernel_size=conv_kernels[1], padding=conv_kernels[1] // 2,
                           stride=stride, groups=conv_groups[1])
        self.conv_3 = conv(inplans, planes // 4, kernel_size=conv_kernels[2], padding=conv_kernels[2] // 2,
                           stride=stride, groups=conv_groups[2])
        self.conv_4 = conv(inplans, planes // 4, kernel_size=conv_kernels[3], padding=conv_kernels[3] // 2,
                           stride=stride, groups=conv_groups[3])
        self.se = SEWeightModule(planes // 4)
        self.split_channel = planes // 4
        self.softmax = nn.Softmax(dim=1)

    def forward(self, x):
        batch_size = x.shape[0]
        branches = [self.conv_1(x), self.conv_2(x), self.conv_3(x), self.conv_4(x)]

        feats = torch.cat(branches, dim=1)
        feats = feats.view(batch_size, 4, self.split_channel, feats.shape[2], feats.shape[3])

        # Per-branch SE gates, softmax-normalised across the four branches.
        se_weights = torch.cat([self.se(b) for b in branches], dim=1)
        attention_vectors = self.softmax(se_weights.view(batch_size, 4, self.split_channel, 1, 1))
        feats_weight = feats * attention_vectors

        # The original implementation grew the output with repeated torch.cat
        # calls, which ended up concatenating the branches in REVERSE order
        # (branch 3 first). That order is preserved here with a single cat.
        return torch.cat([feats_weight[:, i] for i in range(3, -1, -1)], dim=1)


class EPSABlock(nn.Module):
    """ResNet-style bottleneck residual block with a PSAModule in place of
    the usual 3x3 convolution.

    Output channels are ``planes * expansion``. A learned 1x1 projection
    shortcut is always built; an explicit ``downsample`` module, when given,
    replaces it at forward time.
    """

    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None, norm_layer=None, conv_kernels=[3, 5, 7, 9],
                 conv_groups=[1, 4, 8, 16]):
        super(EPSABlock, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        # Both self.conv2 and self.downsample layers downsample the input when stride != 1
        self.conv1 = conv1x1(inplanes, planes)
        self.bn1 = norm_layer(planes)
        self.conv2 = PSAModule(planes, planes, stride=stride, conv_kernels=conv_kernels, conv_groups=conv_groups)
        self.bn2 = norm_layer(planes)
        self.conv3 = conv1x1(planes, planes * self.expansion)
        self.bn3 = norm_layer(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

        # Default projection shortcut (used when no explicit downsample).
        self.shortcut = nn.Sequential(
            conv1x1(inplanes, planes * self.expansion),
            nn.BatchNorm2d(planes * self.expansion)
        )

    def forward(self, x):
        identity = self.shortcut(x)

        out = self.relu(self.bn1(self.conv1(x)))
        out = self.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))

        if self.downsample is not None:
            # An explicit downsample branch overrides the default shortcut.
            identity = self.downsample(x)

        return self.relu(out + identity)


# --- blocks/InvertedResidual.py --------------------------------------------
import torch
import torch.nn as nn
import torch.nn.functional as F


def Conv3x3BNReLU(in_channels, out_channels, stride, groups):
    """3x3 (optionally grouped/depthwise) conv -> BatchNorm -> ReLU."""
    return nn.Sequential(
        nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=3, stride=stride, padding=1,
                  groups=groups),
        nn.BatchNorm2d(out_channels),
        nn.ReLU(inplace=True)
    )


def Conv1x1BNReLU(in_channels, out_channels):
    """1x1 conv -> BatchNorm -> ReLU."""
    return nn.Sequential(
        nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=1, stride=1),
        nn.BatchNorm2d(out_channels),
        nn.ReLU(inplace=True)
    )


def Conv1x1BN(in_channels, out_channels):
    """1x1 conv -> BatchNorm (no activation — linear projection)."""
    return nn.Sequential(
        nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=1, stride=1),
        nn.BatchNorm2d(out_channels)
    )


class InvertedResidual(nn.Module):
    """MobileNetV2-style inverted residual: expand -> depthwise -> project.

    When ``stride == 1`` a shortcut is added; note that here the shortcut is
    a learned 1x1 conv + BN rather than an identity, even when the channel
    counts match. No shortcut module is built for stride > 1, and forward
    never touches it in that case.
    """

    def __init__(self, in_channels, out_channels, stride=1, expansion_factor=6):
        super(InvertedResidual, self).__init__()
        self.stride = stride
        mid_channels = (in_channels * expansion_factor)

        self.bottleneck = nn.Sequential(
            Conv1x1BNReLU(in_channels, mid_channels),
            # groups == mid_channels makes the 3x3 conv depthwise.
            Conv3x3BNReLU(mid_channels, mid_channels, stride, groups=mid_channels),
            Conv1x1BN(mid_channels, out_channels)
        )

        if self.stride == 1:
            self.shortcut = Conv1x1BN(in_channels, out_channels)

    def forward(self, x):
        out = self.bottleneck(x)
        return (out + self.shortcut(x)) if self.stride == 1 else out
def xy_avg_max(x_y: list):
    """Global avg- and max-pool each tensor in ``x_y`` to 1x1 and concatenate.

    Returns a (B, 2 * sum(channels), 1, 1) tensor: all average-pooled
    descriptors first, then all max-pooled ones.
    """
    res_avg = []
    res_max = []
    for idx in x_y:
        avg_pool = F.adaptive_avg_pool2d(idx, 1)
        max_pool = F.adaptive_max_pool2d(idx, 1)
        res_avg.append(avg_pool)
        res_max.append(max_pool)
    res = res_avg + res_max
    return torch.cat(res, dim=1)


class ChannelAtt(nn.Module):
    """Normalization-based channel attention.

    After BatchNorm, each channel is scaled by the relative magnitude of its
    BN affine weight (|gamma_c| / sum|gamma|); a sigmoid of the result gates
    the original input.
    """

    def __init__(self, channels):
        super(ChannelAtt, self).__init__()
        self.channels = channels

        self.bn2 = nn.BatchNorm2d(self.channels, momentum=0.9, affine=True)

    def forward(self, x):
        residual = x

        x = self.bn2(x)
        # Normalized |gamma| as per-channel importance. Computed from .data,
        # so no gradient flows through the normalization of the weights.
        weight_bn = self.bn2.weight.data.abs() / torch.sum(self.bn2.weight.data.abs())
        # Move channels last so the (C,) weight broadcasts over (B, H, W, C).
        x = x.permute(0, 2, 3, 1).contiguous()
        x = torch.mul(weight_bn, x)
        x = x.permute(0, 3, 1, 2).contiguous()

        x = torch.sigmoid(x) * residual

        return x


class MultiScaleAttention(nn.Module):
    """Fuse a high-resolution feature x with a lower-resolution feature y.

    x is projected to y's channel count, y is upsampled to x's spatial size,
    and a channel gate computed from their pooled statistics blends them:
    out = x * att + y * (1 - att).
    """

    def __init__(self, x_ch, y_ch, out_ch, resize_mode='bilinear'):
        super(MultiScaleAttention, self).__init__()
        self.conv_x = nn.Sequential(
            nn.Conv2d(x_ch, y_ch, kernel_size=3, padding=1, bias=False),
            nn.BatchNorm2d(y_ch, momentum=0.9),
            nn.ReLU(inplace=True),
        )

        # Currently unused in forward (see the commented-out call in fuse()).
        self.conv_out = nn.Sequential(
            nn.Conv2d(y_ch, out_ch, kernel_size=3, padding=1, bias=False),
            nn.BatchNorm2d(out_ch, momentum=0.9),
            nn.ReLU(inplace=True),
        )

        self.resize_mode = resize_mode

        # 4 * y_ch inputs: avg+max descriptors of both x and y (xy_avg_max).
        self.conv_xy = nn.Sequential(
            nn.Conv2d(4 * y_ch, y_ch // 2, kernel_size=1, bias=False),
            nn.BatchNorm2d(y_ch // 2, momentum=0.9),
            nn.ReLU(inplace=True),
            nn.Conv2d(y_ch // 2, y_ch, kernel_size=1, bias=False),
            nn.BatchNorm2d(y_ch, momentum=0.9),
        )
        self.channel_att = ChannelAtt(channels=y_ch)

    def prepare(self, x, y):
        x = self.prepare_x(x)
        y = self.prepare_y(x, y)
        return x, y

    def prepare_x(self, x):
        """Project x to y's channel count."""
        x = self.conv_x(x)
        return x

    def prepare_y(self, x, y):
        """Upsample y to x's spatial size."""
        y_expand = F.interpolate(y, x.shape[2:], mode=self.resize_mode, align_corners=True)
        return y_expand

    def fuse(self, x, y):
        attention = xy_avg_max([x, y])
        attention = self.channel_att(self.conv_xy(attention))

        out = x * attention + y * (1 - attention)
        # out = self.conv_out(out)
        return out

    def forward(self, x, y):
        x, y = self.prepare(x, y)
        out = self.fuse(x, y)
        return out


class SoftPooling2D(torch.nn.Module):
    """SoftPool: exponentially weighted average pooling.

    Computes avg(exp(x) * x) / avg(exp(x)) over each pooling window, so
    larger activations dominate the window average.
    """

    def __init__(self, kernel_size, strides=None, padding=0, ceil_mode=False, count_include_pad=True,
                 divisor_override=None):
        super(SoftPooling2D, self).__init__()
        self.avgpool = torch.nn.AvgPool2d(kernel_size, strides, padding, ceil_mode, count_include_pad, divisor_override)

    def forward(self, x):
        # Fixed: subtract the (detached) global max before exponentiating so
        # exp() cannot overflow to inf for large activations. The implied
        # exp(-max) factor cancels exactly in the ratio below, so the output
        # is mathematically unchanged.
        x_exp = torch.exp(x - x.detach().max())
        x_exp_pool = self.avgpool(x_exp)
        weighted = self.avgpool(x_exp * x)
        return weighted / x_exp_pool


def downsample_soft():
    """2x2, stride-2 SoftPool downsampling layer."""
    return SoftPooling2D(2, 2)
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/AwebNoob/EIU-Net/256f8cf227101973f9c7a266809429d331cffcc5/blocks/__pycache__/ASPP.cpython-38.pyc -------------------------------------------------------------------------------- /blocks/__pycache__/EPSA.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AwebNoob/EIU-Net/256f8cf227101973f9c7a266809429d331cffcc5/blocks/__pycache__/EPSA.cpython-38.pyc -------------------------------------------------------------------------------- /blocks/__pycache__/InvertedResidual.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AwebNoob/EIU-Net/256f8cf227101973f9c7a266809429d331cffcc5/blocks/__pycache__/InvertedResidual.cpython-38.pyc -------------------------------------------------------------------------------- /blocks/__pycache__/MulScale_Att.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AwebNoob/EIU-Net/256f8cf227101973f9c7a266809429d331cffcc5/blocks/__pycache__/MulScale_Att.cpython-38.pyc -------------------------------------------------------------------------------- /blocks/__pycache__/Soft_pooling.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AwebNoob/EIU-Net/256f8cf227101973f9c7a266809429d331cffcc5/blocks/__pycache__/Soft_pooling.cpython-38.pyc -------------------------------------------------------------------------------- /blocks/__pycache__/reshape.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AwebNoob/EIU-Net/256f8cf227101973f9c7a266809429d331cffcc5/blocks/__pycache__/reshape.cpython-38.pyc 
class reshaped(nn.Module):
    """Project channels with a 1x1 conv, then bilinearly resize to a fixed size.

    NOTE: despite its name, ``scale_factor`` is passed to ``nn.Upsample`` as a
    target output *size* (int or (H, W) tuple), not a multiplicative factor.
    """

    def __init__(self, in_size, out_size, scale_factor):
        super(reshaped, self).__init__()
        # align_corners=False is the framework default for bilinear mode;
        # stating it explicitly keeps the output identical while silencing
        # the "align_corners not specified" warning.
        self.reshape = nn.Sequential(
            nn.Conv2d(in_size, out_size, kernel_size=1, stride=1, padding=0),
            nn.Upsample(size=scale_factor, mode='bilinear', align_corners=False),
        )

    def forward(self, input):
        return self.reshape(input)


def conv1x1(in_planes, out_planes, stride=1, bias=False):
    """1x1 convolution (no padding)."""
    return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride,
                     padding=0, bias=bias)


def conv3x3(in_planes, out_planes, stride=1, bias=False, group=1):
    """3x3 convolution with padding 1 (spatial size preserved at stride 1)."""
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, groups=group, bias=bias)
class Flatten(nn.Module):
    """Flatten (B, C, ...) to (B, C*...) for the MLP gate."""

    def forward(self, x):
        return x.view(x.size(0), -1)


class SoftPooling2D(torch.nn.Module):
    """SoftPool: exponentially weighted average pooling.

    Computes avg(exp(x) * x) / avg(exp(x)) over each pooling window.
    """

    def __init__(self, kernel_size, strides=None, padding=0, ceil_mode=False, count_include_pad=True,
                 divisor_override=None):
        super(SoftPooling2D, self).__init__()
        self.avgpool = torch.nn.AvgPool2d(kernel_size, strides, padding, ceil_mode, count_include_pad, divisor_override)

    def forward(self, x):
        # Fixed: shift by the (detached) global max before exp() so large
        # activations cannot overflow to inf; the shift cancels in the ratio.
        x_exp = torch.exp(x - x.detach().max())
        x_exp_pool = self.avgpool(x_exp)
        weighted = self.avgpool(x_exp * x)
        return weighted / x_exp_pool


def downsample_soft():
    """2x2, stride-2 SoftPool downsampling layer."""
    return SoftPooling2D(2, 2)


class ChannelGate(nn.Module):
    """CBAM-style channel gate, averaged within 4 scale groups.

    NOTE: the reshape in forward() hard-codes gate_channels == 16, interpreted
    as 4 scales x 4 channels; the raw attention is averaged over each group's
    4 channels before gating.
    """

    def __init__(self, gate_channels, reduction_ratio=16, pool_types=['avg', 'max', 'sp']):
        super(ChannelGate, self).__init__()
        self.gate_channels = gate_channels
        self.mlp = nn.Sequential(
            Flatten(),
            nn.Linear(gate_channels, gate_channels // reduction_ratio),
            nn.ReLU(),
            nn.Linear(gate_channels // reduction_ratio, gate_channels)
        )
        self.pool_types = pool_types

    def forward(self, x):
        channel_att_sum = None
        for pool_type in self.pool_types:
            # Every pooling variant collapses the spatial dims to 1x1.
            if pool_type == 'avg':
                pooled = F.avg_pool2d(x, (x.size(2), x.size(3)), stride=(x.size(2), x.size(3)))
            elif pool_type == 'max':
                pooled = F.max_pool2d(x, (x.size(2), x.size(3)), stride=(x.size(2), x.size(3)))
            elif pool_type == 'lp':
                pooled = F.lp_pool2d(x, 2, (x.size(2), x.size(3)), stride=(x.size(2), x.size(3)))
            elif pool_type == 'sp':
                pooled = SoftPooling2D((x.size(2), x.size(3)), (x.size(2), x.size(3)))(x)
            elif pool_type == 'lse':
                # LSE pool only
                pooled = logsumexp_2d(x)
            else:
                # Fixed: an unknown type previously left channel_att_raw unbound
                # (UnboundLocalError) or silently reused a stale value.
                raise ValueError('unknown pool type: %r' % (pool_type,))
            channel_att_raw = self.mlp(pooled)

            if channel_att_sum is None:
                channel_att_sum = channel_att_raw
            else:
                channel_att_sum = channel_att_sum + channel_att_raw

        # Average the raw attention within each of the 4 scale groups, then
        # broadcast the group value back over its 4 channels.
        channel_att_sum = channel_att_sum.reshape(channel_att_sum.shape[0], 4, 4)
        avg_weight = torch.mean(channel_att_sum, dim=2).unsqueeze(2)
        avg_weight = avg_weight.expand(channel_att_sum.shape[0], 4, 4).reshape(channel_att_sum.shape[0], 16)
        # torch.sigmoid replaces the deprecated F.sigmoid (same function).
        scale = torch.sigmoid(avg_weight).unsqueeze(2).unsqueeze(3).expand_as(x)

        return x * scale, scale


def logsumexp_2d(tensor):
    """Numerically stable log-sum-exp over the flattened spatial dims."""
    tensor_flatten = tensor.view(tensor.size(0), tensor.size(1), -1)
    s, _ = torch.max(tensor_flatten, dim=2, keepdim=True)
    outputs = s + (tensor_flatten - s).exp().sum(dim=2, keepdim=True).log()
    return outputs


class ChannelPool(nn.Module):
    """Stack channel-wise max and mean into a 2-channel map."""

    def forward(self, x):
        return torch.cat((torch.max(x, 1)[0].unsqueeze(1), torch.mean(x, 1).unsqueeze(1)), dim=1)


class SpatialGate(nn.Module):
    """CBAM spatial gate: 7x7 conv over the channel-pooled map, sigmoid scale."""

    def __init__(self):
        super(SpatialGate, self).__init__()
        kernel_size = 7
        self.compress = ChannelPool()
        self.spatial = BasicConv(2, 1, kernel_size, stride=1, padding=(kernel_size - 1) // 2, relu=False)

    def forward(self, x):
        x_compress = self.compress(x)
        x_out = self.spatial(x_compress)
        # torch.sigmoid replaces the deprecated F.sigmoid; the (B,1,H,W) map
        # broadcasts over channels.
        scale = torch.sigmoid(x_out)
        return x * scale, scale
class SpatialAtten(nn.Module):
    """Spatial attention over 4 scale groups.

    NOTE: the expand/reshape in forward() hard-codes a 4-scale x 4-channel
    layout, i.e. it assumes this block operates on 16-channel input with
    ``out_size`` == 4.
    """

    def __init__(self, in_size, out_size, kernel_size=3, stride=1):
        super(SpatialAtten, self).__init__()
        self.conv1 = BasicConv(in_size, out_size, kernel_size, stride=stride,
                               padding=(kernel_size - 1) // 2, relu=True)
        self.conv2 = BasicConv(out_size, out_size, kernel_size=1, stride=stride,
                               padding=0, relu=True, bn=False)

    def forward(self, x):
        residual = x
        x_out = self.conv1(x)
        x_out = self.conv2(x_out)
        # torch.sigmoid replaces the deprecated F.sigmoid (same function).
        # (B, 4, H, W) -> (B, 4, 1, H, W) -> broadcast each scale's map over
        # its 4 channels -> (B, 16, H, W).
        spatial_att = torch.sigmoid(x_out).unsqueeze(4).permute(0, 1, 4, 2, 3)
        spatial_att = spatial_att.expand(spatial_att.shape[0], 4, 4, spatial_att.shape[3],
                                         spatial_att.shape[4]).reshape(
            spatial_att.shape[0], 16, spatial_att.shape[3], spatial_att.shape[4])
        x_out = residual * spatial_att

        x_out += residual

        return x_out, spatial_att


class Scale_atten_block_softpool(nn.Module):
    """Channel gate followed by an optional spatial gate."""

    def __init__(self, gate_channels, reduction_ratio=16, pool_types=['lp', 'sp'], no_spatial=False):
        super(Scale_atten_block_softpool, self).__init__()
        self.ChannelGate = ChannelGate(gate_channels, reduction_ratio, pool_types)
        self.no_spatial = no_spatial
        if not no_spatial:
            self.SpatialGate = SpatialAtten(gate_channels, gate_channels // reduction_ratio)

    def forward(self, x):
        """Return (gated features, channel attention, spatial attention or None)."""
        x_out, ca_atten = self.ChannelGate(x)
        # Fixed: sa_atten was previously unbound (NameError on return) when
        # no_spatial=True.
        sa_atten = None
        if not self.no_spatial:
            x_out, sa_atten = self.SpatialGate(x_out)

        return x_out, ca_atten, sa_atten


class scale_atten_convblock_softpool(nn.Module):
    """Scale attention block: channel+spatial gating, residual add, 3x3 conv.

    ``in_size`` is expected to be 16 (4 scales x 4 channels) by the attention
    submodules it builds.
    """

    def __init__(self, in_size, out_size, stride=1, downsample=None, use_cbam=True, no_spatial=False, drop_out=False):
        super(scale_atten_convblock_softpool, self).__init__()
        self.downsample = downsample
        self.stride = stride
        self.no_spatial = no_spatial
        self.dropout = drop_out

        self.relu = nn.ReLU(inplace=True)
        self.conv3 = conv3x3(in_size, out_size)
        self.bn3 = nn.BatchNorm2d(out_size)

        if use_cbam:
            self.cbam = Scale_atten_block_softpool(in_size, reduction_ratio=4, no_spatial=self.no_spatial)
        else:
            self.cbam = None

    def forward(self, x):
        residual = x

        if self.downsample is not None:
            residual = self.downsample(x)

        if self.cbam is not None:
            out, scale_c_atten, scale_s_atten = self.cbam(x)
        else:
            # Fixed: 'out' was previously unbound (NameError) when
            # use_cbam=False.
            out = x

        out = out + residual
        out = self.relu(out)
        out = self.conv3(out)
        out = self.bn3(out)
        out = self.relu(out)

        if self.dropout:
            # Fixed: a freshly constructed nn.Dropout2d always ran in training
            # mode, applying dropout even during evaluation. F.dropout2d
            # honours this module's train/eval state.
            out = F.dropout2d(out, p=0.5, training=self.training)

        return out
class BCEDiceLoss(nn.Module):
    """0.5 * BCE-with-logits + soft dice loss for binary segmentation."""

    def __init__(self):
        super(BCEDiceLoss, self).__init__()

    def forward(self, input, target):
        """``input`` holds raw logits; ``target`` a {0,1} mask of the same shape."""
        bce = F.binary_cross_entropy_with_logits(input, target)
        smooth = 1e-5
        input = torch.sigmoid(input)
        num = target.size(0)
        input = input.view(num, -1)
        target = target.view(num, -1)
        intersection = (input * target)
        dice = (2. * intersection.sum(1) + smooth) / (input.sum(1) + target.sum(1) + smooth)
        dice = 1 - dice.sum() / num
        return 0.5 * bce + dice


class LovaszHingeLoss(nn.Module):
    """Per-image Lovasz hinge loss (requires the optional LovaszSoftmax import)."""

    def __init__(self):
        super(LovaszHingeLoss, self).__init__()

    def forward(self, input, target):
        input = input.squeeze(1)
        target = target.squeeze(1)
        loss = lovasz_hinge(input, target, per_image=True)

        return loss


def dice_coeff(pred, target):
    """Soft dice coefficient over the whole batch (smooth = 1)."""
    num = target.size(0)
    smooth = 1.
    m1 = pred.view(num, -1)
    m2 = target.view(num, -1)
    intersection = (m1 * m2).sum()

    return (2. * intersection + smooth) / (m1.sum() + m2.sum() + smooth)


class SoftDiceLoss_1(nn.Module):
    """Per-sample soft dice *score* (higher is better; NOT converted to a loss).

    The ``1 - score`` conversion is intentionally left commented out to match
    the original behaviour.
    """

    def __init__(self, weight=None, size_average=True):
        super(SoftDiceLoss_1, self).__init__()

    def forward(self, logits, targets):
        num = targets.size(0)
        smooth = 1.
        probs = torch.sigmoid(logits)  # replaces the deprecated F.sigmoid
        m1 = probs.view(num, -1)
        m2 = targets.view(num, -1)
        intersection = (m1 * m2)
        score = 2. * (intersection.sum(1) + smooth) / (m1.sum(1) + m2.sum(1) + smooth)
        # score = 1 - score.sum() / num
        return score


class BCELoss2d(nn.Module):
    """BCE on sigmoid probabilities over flattened predictions."""

    def __init__(self, weight=None, size_average=True):
        super(BCELoss2d, self).__init__()
        # nn.BCELoss(weight, size_average) is deprecated; map the legacy flag
        # onto the equivalent 'reduction' argument (True -> mean, False -> sum).
        self.bce_loss = nn.BCELoss(weight, reduction='mean' if size_average else 'sum')

    def forward(self, logits, targets):
        # For a binary problem, sigmoid is equivalent to a 2-way softmax.
        probs = torch.sigmoid(logits)  # replaces the deprecated F.sigmoid
        probs_flat = probs.view(-1)
        targets_flat = targets.view(-1)

        return self.bce_loss(probs_flat, targets_flat)


def sum_tensor(inp, axes, keepdim=False):
    """Sum ``inp`` over every axis in ``axes`` (deduplicated), optionally keeping dims."""
    axes = np.unique(axes).astype(int)
    if keepdim:
        for ax in axes:
            inp = inp.sum(int(ax), keepdim=True)
    else:
        # Sum from the highest axis down so earlier axis indices stay valid.
        for ax in sorted(axes, reverse=True):
            inp = inp.sum(int(ax))
    return inp


def get_tp_fp_fn_tn(net_output, gt, axes=None, mask=None, square=False):
    """
    net_output must be (b, c, x, y(, z)))
    gt must be a label map (shape (b, 1, x, y(, z)) OR shape (b, x, y(, z))) or one hot encoding (b, c, x, y(, z))
    if mask is provided it must have shape (b, 1, x, y(, z)))
    :param net_output:
    :param gt:
    :param axes: can be (, ) = no summation
    :param mask: mask must be 1 for valid pixels and 0 for invalid pixels
    :param square: if True then fp, tp and fn will be squared before summation
    :return: (tp, fp, fn, tn), each summed over ``axes`` (default: all spatial axes)
    """
    if axes is None:
        axes = tuple(range(2, len(net_output.size())))

    shp_x = net_output.shape
    shp_y = gt.shape

    with torch.no_grad():
        if len(shp_x) != len(shp_y):
            gt = gt.view((shp_y[0], 1, *shp_y[1:]))

        if all([i == j for i, j in zip(net_output.shape, gt.shape)]):
            # if this is the case then gt is probably already a one hot encoding
            y_onehot = gt
        else:
            gt = gt.long()
            # Allocate directly on the output's device (replaces the explicit
            # .cuda() branch; behaviour is identical on CPU and GPU).
            y_onehot = torch.zeros(shp_x, device=net_output.device)
            y_onehot.scatter_(1, gt, 1)

    tp = net_output * y_onehot
    fp = net_output * (1 - y_onehot)
    fn = (1 - net_output) * y_onehot
    tn = (1 - net_output) * (1 - y_onehot)

    if mask is not None:
        tp = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(tp, dim=1)), dim=1)
        fp = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(fp, dim=1)), dim=1)
        fn = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(fn, dim=1)), dim=1)
        tn = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(tn, dim=1)), dim=1)

    if square:
        tp = tp ** 2
        fp = fp ** 2
        fn = fn ** 2
        tn = tn ** 2

    if len(axes) > 0:
        tp = sum_tensor(tp, axes, keepdim=False)
        fp = sum_tensor(fp, axes, keepdim=False)
        fn = sum_tensor(fn, axes, keepdim=False)
        tn = sum_tensor(tn, axes, keepdim=False)

    return tp, fp, fn, tn


class SoftDiceLoss_New(nn.Module):
    """Soft dice loss built on get_tp_fp_fn_tn (nnU-Net style)."""

    def __init__(self, apply_nonlin=None, batch_dice=False, do_bg=True, smooth=1.):
        """
        :param apply_nonlin: optional nonlinearity applied to the prediction first
        :param batch_dice: if True, dice is computed over the whole batch at once
        :param do_bg: if False, the background channel is excluded from the mean
        :param smooth: additive smoothing term
        """
        super(SoftDiceLoss_New, self).__init__()

        self.do_bg = do_bg
        self.batch_dice = batch_dice
        self.apply_nonlin = apply_nonlin
        self.smooth = smooth

    def forward(self, x, y, loss_mask=None):
        shp_x = x.shape

        if self.batch_dice:
            axes = [0] + list(range(2, len(shp_x)))
        else:
            axes = list(range(2, len(shp_x)))

        if self.apply_nonlin is not None:
            x = self.apply_nonlin(x)

        tp, fp, fn, _ = get_tp_fp_fn_tn(x, y, axes, loss_mask, False)

        numerator = 2 * tp + self.smooth
        denominator = 2 * tp + fp + fn + self.smooth

        dc = numerator / (denominator + 1e-8)

        if not self.do_bg:
            if self.batch_dice:
                dc = dc[1:]
            else:
                dc = dc[:, 1:]
        dc = dc.mean()

        return 1 - dc


class Combined_Bce_Dice_Loss(nn.Module):
    def __init__(self, aggregate="sum"):
        """
        DO NOT APPLY NONLINEARITY IN YOUR NETWORK!
        THIS LOSS IS INTENDED TO BE USED FOR BRATS REGIONS ONLY
        :param aggregate: only "sum" (0.6 * BCE + 0.4 * dice) is implemented
        """
        super(Combined_Bce_Dice_Loss, self).__init__()
        self.aggregate = aggregate
        self.ce = nn.BCEWithLogitsLoss()
        self.dc = SoftDiceLoss_New(apply_nonlin=torch.sigmoid)

    def forward(self, net_output, target):
        ce_loss = self.ce(net_output, target)
        dc_loss = self.dc(net_output, target)

        if self.aggregate == "sum":
            result = 0.6 * ce_loss + 0.4 * dc_loss  # a 0.8 / 0.2 split also works
        else:
            raise NotImplementedError("nah son")  # reserved for other stuff (later)

        return result
def iou_score(output, target):
    """IoU (Jaccard) after sigmoid + 0.5 thresholding; dice derived as 2*iou/(iou+1).

    Torch tensors are sigmoided and moved to numpy; plain arrays are used as-is.
    Returns (iou, dice).
    """
    smooth = 1e-5

    if torch.is_tensor(output):
        output = torch.sigmoid(output).data.cpu().numpy()
    if torch.is_tensor(target):
        target = target.data.cpu().numpy()
    output_ = output > 0.5
    target_ = target > 0.5
    intersection = (output_ & target_).sum()
    union = (output_ | target_).sum()
    iou = (intersection + smooth) / (union + smooth)
    dice = (2 * iou) / (iou + 1)
    return iou, dice


def dice_coef(output, target):
    """Soft dice over sigmoid probabilities (no thresholding)."""
    smooth = 1e-5

    output = torch.sigmoid(output).view(-1).data.cpu().numpy()
    target = target.view(-1).data.cpu().numpy()
    intersection = (output * target).sum()

    return (2. * intersection + smooth) / \
        (output.sum() + target.sum() + smooth)


def assert_shape(test, reference):
    """Raise AssertionError unless the two arrays share a shape."""
    assert test.shape == reference.shape, "Shape mismatch: {} and {}".format(
        test.shape, reference.shape)


class ConfusionMatrix:
    """Lazy binary confusion matrix over two same-shape arrays.

    Counts treat any non-zero element as positive; statistics are computed on
    first access and reset whenever test/reference are replaced.
    """

    def __init__(self, test=None, reference=None):

        self.tp = None
        self.fp = None
        self.tn = None
        self.fn = None
        self.size = None
        self.reference_empty = None
        self.reference_full = None
        self.test_empty = None
        self.test_full = None
        self.set_reference(reference)
        self.set_test(test)

    def set_test(self, test):
        """Replace the prediction array and invalidate cached statistics."""
        self.test = test
        self.reset()

    def set_reference(self, reference):
        """Replace the reference array and invalidate cached statistics."""
        self.reference = reference
        self.reset()

    def reset(self):
        """Drop all cached statistics; they are recomputed on next access."""
        self.tp = None
        self.fp = None
        self.tn = None
        self.fn = None
        self.size = None
        self.test_empty = None
        self.test_full = None
        self.reference_empty = None
        self.reference_full = None

    def compute(self):
        """Populate all counts and existence flags from test/reference."""
        if self.test is None or self.reference is None:
            raise ValueError("'test' and 'reference' must both be set to compute confusion matrix.")

        assert_shape(self.test, self.reference)

        self.tp = int(((self.test != 0) * (self.reference != 0)).sum())
        self.fp = int(((self.test != 0) * (self.reference == 0)).sum())
        self.tn = int(((self.test == 0) * (self.reference == 0)).sum())
        self.fn = int(((self.test == 0) * (self.reference != 0)).sum())
        self.size = int(np.prod(self.reference.shape, dtype=np.int64))
        self.test_empty = not np.any(self.test)
        self.test_full = np.all(self.test)
        self.reference_empty = not np.any(self.reference)
        self.reference_full = np.all(self.reference)

    def get_matrix(self):
        """Return (tp, fp, tn, fn), computing them on demand."""
        for entry in (self.tp, self.fp, self.tn, self.fn):
            if entry is None:
                self.compute()
                break

        return self.tp, self.fp, self.tn, self.fn

    def get_size(self):
        """Return the total number of elements."""
        if self.size is None:
            self.compute()
        return self.size

    def get_existence(self):
        """Return (test_empty, test_full, reference_empty, reference_full)."""
        for case in (self.test_empty, self.test_full, self.reference_empty, self.reference_full):
            if case is None:
                self.compute()
                break

        return self.test_empty, self.test_full, self.reference_empty, self.reference_full


def _binarize(test, reference):
    """Sigmoid torch tensors to numpy, then threshold both inputs at 0.5.

    Shared preamble of every metric below (previously duplicated in each).
    ``None`` values pass through untouched, so callers that supply a
    ready-made ConfusionMatrix keep working (the original raised a TypeError
    on ``None > 0.5``).
    """
    if torch.is_tensor(test):
        test = torch.sigmoid(test).data.cpu().numpy()
    if torch.is_tensor(reference):
        reference = reference.data.cpu().numpy()
    if test is not None:
        test = test > 0.5
    if reference is not None:
        reference = reference > 0.5
    return test, reference


def precision(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=True, **kwargs):
    """TP / (TP + FP)"""
    test, reference = _binarize(test, reference)

    if confusion_matrix is None:
        confusion_matrix = ConfusionMatrix(test, reference)

    tp, fp, tn, fn = confusion_matrix.get_matrix()
    test_empty, test_full, reference_empty, reference_full = confusion_matrix.get_existence()

    if test_empty:
        if nan_for_nonexisting:
            return float("NaN")
        else:
            return 0.

    return float(tp / (tp + fp))


def recall(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=True, **kwargs):
    """TP / (TP + FN)"""
    # Alias of sensitivity; conversion happens there.
    return sensitivity(test, reference, confusion_matrix, nan_for_nonexisting, **kwargs)


def dice_co(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=True, **kwargs):
    """2TP / (2TP + FP + FN)"""
    test, reference = _binarize(test, reference)

    if confusion_matrix is None:
        confusion_matrix = ConfusionMatrix(test, reference)

    tp, fp, tn, fn = confusion_matrix.get_matrix()
    test_empty, test_full, reference_empty, reference_full = confusion_matrix.get_existence()

    if test_empty and reference_empty:
        if nan_for_nonexisting:
            return float("NaN")
        else:
            return 0.

    return float(2. * tp / (2 * tp + fp + fn))


def jaccard(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=True, **kwargs):
    """TP / (TP + FP + FN)"""
    test, reference = _binarize(test, reference)

    if confusion_matrix is None:
        confusion_matrix = ConfusionMatrix(test, reference)

    tp, fp, tn, fn = confusion_matrix.get_matrix()
    test_empty, test_full, reference_empty, reference_full = confusion_matrix.get_existence()

    if test_empty and reference_empty:
        if nan_for_nonexisting:
            return float("NaN")
        else:
            return 0.

    return float(tp / (tp + fp + fn))


def sensitivity(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=True, **kwargs):
    """TP / (TP + FN)"""
    test, reference = _binarize(test, reference)

    if confusion_matrix is None:
        confusion_matrix = ConfusionMatrix(test, reference)

    tp, fp, tn, fn = confusion_matrix.get_matrix()
    test_empty, test_full, reference_empty, reference_full = confusion_matrix.get_existence()

    if reference_empty:
        if nan_for_nonexisting:
            return float("NaN")
        else:
            return 0.

    return float(tp / (tp + fn))


def specificity(test=None, reference=None, confusion_matrix=None, nan_for_nonexisting=True, **kwargs):
    """TN / (TN + FP)"""
    test, reference = _binarize(test, reference)

    if confusion_matrix is None:
        confusion_matrix = ConfusionMatrix(test, reference)

    tp, fp, tn, fn = confusion_matrix.get_matrix()
    test_empty, test_full, reference_empty, reference_full = confusion_matrix.get_existence()

    if reference_full:
        if nan_for_nonexisting:
            return float("NaN")
        else:
            return 0.

    return float(tn / (tn + fp))


def accuracy(test=None, reference=None, confusion_matrix=None, **kwargs):
    """(TP + TN) / (TP + FP + FN + TN)"""
    test, reference = _binarize(test, reference)

    if confusion_matrix is None:
        confusion_matrix = ConfusionMatrix(test, reference)

    tp, fp, tn, fn = confusion_matrix.get_matrix()

    return float((tp + tn) / (tp + fp + tn + fn))


# --- C2FTrans-style metrics -------------------------------------------------


def dice_coefficient(pred, gt, smooth=1e-5):
    """ computational formula:
        dice = 2TP/(FP + 2TP + FN)

    NOTE: clamps pred/gt to {0,1} *in place*, mutating the caller's arrays.
    Returns the scalar 1 (not an array) when both inputs are entirely empty.
    """
    N = gt.shape[0]
    pred[pred >= 1] = 1
    gt[gt >= 1] = 1
    pred_flat = pred.reshape(N, -1)
    gt_flat = gt.reshape(N, -1)
    if (pred.sum() + gt.sum()) == 0:
        return 1
    intersection = (pred_flat * gt_flat).sum(1)
    unionset = pred_flat.sum(1) + gt_flat.sum(1)
    dice = (2 * intersection + smooth) / (unionset + smooth)
    return dice.sum() / N


def sespiou_coefficient2(pred, gt, smooth=1e-5):
    """ computational formula:
        sensitivity = TP/(TP+FN)
        specificity = TN/(FP+TN)
        iou = TP/(FP+TP+FN)

    NOTE: clamps pred/gt to {0,1} *in place*, mutating the caller's arrays.
    Returns batch means of (SE, SP, IOU, Acc, F1, Precision, Recall).
    """
    N = gt.shape[0]
    pred[pred >= 1] = 1
    gt[gt >= 1] = 1
    pred_flat = pred.reshape(N, -1)
    gt_flat = gt.reshape(N, -1)
    TP = (pred_flat * gt_flat).sum(1)
    FN = gt_flat.sum(1) - TP
    # (x + 1) % 2 flips a binary mask, giving the negatives.
    pred_flat_no = (pred_flat + 1) % 2
    gt_flat_no = (gt_flat + 1) % 2
    TN = (pred_flat_no * gt_flat_no).sum(1)
    FP = pred_flat.sum(1) - TP
    SE = (TP + smooth) / (TP + FN + smooth)
    SP = (TN + smooth) / (FP + TN + smooth)
    IOU = (TP + smooth) / (FP + TP + FN + smooth)
    Acc = (TP + TN + smooth) / (TP + FP + FN + TN + smooth)
    Precision = (TP + smooth) / (TP + FP + smooth)
    Recall = (TP + smooth) / (TP + FN + smooth)
    F1 = 2 * Precision * Recall / (Recall + Precision + smooth)
    return SE.sum() / N, SP.sum() / N, IOU.sum() / N, Acc.sum() / N, F1.sum() / N, Precision.sum() / N, Recall.sum() / N
class ResEncoder(nn.Module):
    """Two 3x3 conv+BN+ReLU layers with a 1x1-projected residual connection."""

    def __init__(self, in_channels, out_channels):
        super(ResEncoder, self).__init__()
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1)
        self.bn1 = nn.BatchNorm2d(out_channels)
        self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1)
        self.bn2 = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU(inplace=False)
        self.conv1x1 = nn.Conv2d(in_channels, out_channels, kernel_size=1)

    def forward(self, x):
        shortcut = self.conv1x1(x)
        y = self.relu(self.bn1(self.conv1(x)))
        y = self.relu(self.bn2(self.conv2(y)))
        return self.relu(y + shortcut)


class DoubleConv(nn.Module):
    """Standard U-Net double 3x3 conv block (conv-BN-ReLU, twice)."""

    def __init__(self, in_ch, out_ch):
        super(DoubleConv, self).__init__()
        layers = [
            nn.Conv2d(in_ch, out_ch, 3, padding=1),
            nn.BatchNorm2d(out_ch),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_ch, out_ch, 3, padding=1),
            nn.BatchNorm2d(out_ch),
            nn.ReLU(inplace=True),
        ]
        self.conv = nn.Sequential(*layers)

    def forward(self, x):
        return self.conv(x)


class UP(nn.Module):
    """2x spatial upsampling via a stride-2 transposed convolution."""

    def __init__(self, in_channels, out_channels):
        super(UP, self).__init__()
        self.up = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=2, stride=2)

    def forward(self, x):
        return self.up(x)


class OutConv(nn.Module):
    """Final 1x1 convolution mapping features to class logits."""

    def __init__(self, in_channels, out_channels):
        super(OutConv, self).__init__()
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1)

    def forward(self, x):
        return self.conv(x)


class EIU_Net(nn.Module):
    """Encoder-decoder segmentation network.

    Inverted-residual encoders with an EPSA + ASPP bottleneck, multi-scale
    attention on the skip connections, and four deep-supervision heads fused
    by a scale-attention block before the final 1x1 classifier.
    """

    def __init__(self, n_channels, n_classes):
        super(EIU_Net, self).__init__()
        self.n_channels = n_channels
        self.n_classes = n_classes

        filters = [32, 64, 128, 256, 512]

        # encoder path
        self.enc_input = ResEncoder(self.n_channels, filters[0])
        self.encoder_1 = InvertedResidual(filters[0], filters[1])
        self.encoder_2 = InvertedResidual(filters[1], filters[2])
        self.encoder_3 = InvertedResidual(filters[2], filters[3])
        self.encoder_4 = EPSABlock(filters[3], 128)
        self.downsample = downsample_soft()
        self.aspp = ASPP(filters[4], [6, 12, 18])

        # decoder path: transpose-conv upsample, then double conv after concat
        self.decoder_4 = UP(filters[4], filters[3])
        self.double_conv_4 = DoubleConv(filters[4], filters[3])
        self.decoder_3 = UP(filters[3], filters[2])
        self.double_conv_3 = DoubleConv(filters[3], filters[2])
        self.decoder_2 = UP(filters[2], filters[1])
        self.double_conv_2 = DoubleConv(filters[2], filters[1])
        self.decoder_1 = UP(filters[1], filters[0])
        self.double_conv_1 = DoubleConv(filters[1], filters[0])

        # deep-supervision heads, each resized to the 224x320 input resolution
        self.reshape_4 = reshaped(in_size=256, out_size=4, scale_factor=(224, 320))
        self.reshape_3 = reshaped(in_size=128, out_size=4, scale_factor=(224, 320))
        self.reshape_2 = reshaped(in_size=64, out_size=4, scale_factor=(224, 320))
        self.reshape_1 = nn.Conv2d(in_channels=32, out_channels=4, kernel_size=1)
        self.scale_att = scale_atten_convblock_softpool(in_size=16, out_size=4)

        self.final = OutConv(4, self.n_classes)

        # attention over adjacent encoder scales, fed into the skip connections
        self.mul_scale_att_1 = MultiScaleAttention(32, 64, 64)
        self.mul_scale_att_2 = MultiScaleAttention(64, 128, 128)
        self.mul_scale_att_3 = MultiScaleAttention(128, 256, 256)

        for module in self.modules():
            if isinstance(module, (nn.Conv2d, nn.BatchNorm2d)):
                init_weights(module, init_type='kaiming')

    def forward(self, x):
        # ---- encoder (each stage halves the spatial size) ----
        e0 = self.enc_input(x)                                   # 32ch, full res
        e1 = self.encoder_1(self.downsample(e0))                 # 64ch, 1/2
        skip1 = self.downsample(self.mul_scale_att_1(e0, e1))
        e2 = self.encoder_2(self.downsample(e1))                 # 128ch, 1/4
        skip2 = self.downsample(self.mul_scale_att_2(e1, e2))
        e3 = self.encoder_3(self.downsample(e2))                 # 256ch, 1/8
        skip3 = self.downsample(self.mul_scale_att_3(e2, e3))
        e4 = self.aspp(self.encoder_4(self.downsample(e3)))      # 512ch, 1/16

        # ---- decoder: upsample, concat attention skip, double conv ----
        d4 = self.double_conv_4(torch.cat((skip3, self.decoder_4(e4)), dim=1))
        d3 = self.double_conv_3(torch.cat((skip2, self.decoder_3(d4)), dim=1))
        d2 = self.double_conv_2(torch.cat((skip1, self.decoder_2(d3)), dim=1))
        d1 = self.double_conv_1(torch.cat((e0, self.decoder_1(d2)), dim=1))

        # ---- deep supervision fused by scale attention ----
        dsv_cat = torch.cat([self.reshape_1(d1), self.reshape_2(d2),
                             self.reshape_3(d3), self.reshape_4(d4)], dim=1)
        return self.final(self.scale_att(dsv_cat))
def parse_args(args=None):
    """Build and parse the command-line configuration.

    Parameters
    ----------
    args : list[str] | None
        Argument list to parse; ``None`` (the default) falls back to
        ``sys.argv[1:]``, so existing callers are unaffected.

    Returns
    -------
    argparse.Namespace
        Parsed configuration.

    Notes
    -----
    Several ``help`` strings previously contradicted the actual defaults
    (batch_size 8 vs "16", loss Combined_Bce_Dice_Loss vs "BCEDiceLoss",
    optimizer Adam vs "AdamW"); they now match.
    """
    parser = argparse.ArgumentParser()

    parser.add_argument('--name', default=None,
                        help='model name: (default: arch+timestamp)')
    parser.add_argument('--epochs', default=150, type=int, metavar='N',
                        help='number of total epochs to run')
    parser.add_argument('-b', '--batch_size', default=8, type=int,
                        metavar='N', help='mini-batch size (default: 8)')

    # model
    parser.add_argument('--device', default='cuda', help='device to run')
    parser.add_argument('--arch', '-a', metavar='ARCH', default='EIU_Net',
                        choices=['EIU_Net', 'CPF_Net', 'AttentionUnet', 'CE_Net', 'ori_UNet', 'DCSAU_Net', 'FAT_Net',
                                 'UNext'])
    parser.add_argument('--input_channels', default=3, type=int,
                        help='input channels')
    parser.add_argument('--num_classes', default=1, type=int,
                        help='number of classes')
    parser.add_argument('--input_w', default=320, type=int,
                        help='image width')
    parser.add_argument('--input_h', default=224, type=int,
                        help='image height')

    # loss
    parser.add_argument('--loss', default='Combined_Bce_Dice_Loss',
                        choices=LOSS_NAMES,
                        help='loss: ' +
                             ' | '.join(LOSS_NAMES) +
                             ' (default: Combined_Bce_Dice_Loss)')

    # dataset
    parser.add_argument('--dataset', default='PH2',
                        help='dataset name')
    parser.add_argument('--img_ext', default='.jpg',
                        help='image file extension')
    parser.add_argument('--mask_ext', default='.png',
                        help='mask file extension')

    # optimizer
    parser.add_argument('--optimizer', default='Adam',
                        choices=['Adam', 'SGD', 'AdamW'],
                        help='optimizer: ' +
                             ' | '.join(['Adam', 'SGD', 'AdamW']) +
                             ' (default: Adam)')
    parser.add_argument('--lr', '--learning_rate', default=1e-3, type=float,
                        metavar='LR', help='initial learning rate')
    parser.add_argument('--momentum', default=0.9, type=float,
                        help='momentum')
    parser.add_argument('--weight_decay', default=1e-4, type=float,
                        help='weight decay')

    # scheduler
    parser.add_argument('--scheduler', default='CosineAnnealingWarmRestarts',
                        choices=['CosineAnnealingLR', 'ReduceLROnPlateau', 'MultiStepLR', 'ConstantLR',
                                 'CosineAnnealingWarmRestarts'])
    parser.add_argument('--min_lr', default=1e-5, type=float,
                        help='minimum learning rate')
    parser.add_argument('--factor', default=0.5, type=float)
    parser.add_argument('--patience', default=20, type=int)
    parser.add_argument('--milestones', default='1,2', type=str)
    parser.add_argument('--gamma', default=2 / 3, type=float)
    parser.add_argument('--early_stopping', default=-1, type=int,
                        metavar='N', help='early stopping (default: -1)')

    parser.add_argument('--num_workers', default=6, type=int)

    config = parser.parse_args(args)

    return config
import numpy
from scipy.ndimage import _ni_support
# NOTE: ``scipy.ndimage.morphology`` and ``scipy.ndimage.measurements`` were
# deprecated and later removed from SciPy; the same callables live directly in
# ``scipy.ndimage``.
from scipy.ndimage import binary_erosion, distance_transform_edt, find_objects, generate_binary_structure, label
from scipy.stats import pearsonr


def dc(result, reference):
    r"""
    Dice coefficient

    Computes the Dice coefficient (also known as Sorensen index) between the
    binary objects in two images:

    .. math:: DC = \frac{2|A \cap B|}{|A| + |B|}

    Parameters
    ----------
    result : array_like
        Input data containing objects; converted to binary (background where
        0, object everywhere else).
    reference : array_like
        Input data containing objects; converted to binary as above.

    Returns
    -------
    dc : float
        Dice coefficient, from 0 (no overlap) to 1 (perfect overlap).

    Notes
    -----
    This is a real metric; the binary images can be supplied in any order.
    """
    # ``numpy.bool`` was removed in NumPy 1.24 -- use the builtin ``bool``.
    result = numpy.atleast_1d(result.astype(bool))
    reference = numpy.atleast_1d(reference.astype(bool))

    intersection = numpy.count_nonzero(result & reference)
    size_i1 = numpy.count_nonzero(result)
    size_i2 = numpy.count_nonzero(reference)

    # the +1e-6 keeps the denominator positive, so no ZeroDivisionError can
    # occur (the original wrapped this in a dead try/except)
    return 2. * intersection / (float(size_i1 + size_i2) + 1e-6)


def tversky(result, reference):
    # NOTE(review): placeholder -- always returns 1; the Tversky index is not
    # actually implemented.
    return 1


def jc(result, reference):
    """
    Jaccard coefficient between the binary objects in two images.

    Parameters
    ----------
    result : array_like
        Converted to binary: background where 0, object everywhere else.
    reference : array_like
        Converted to binary as above.

    Returns
    -------
    jc : float
        Jaccard coefficient, from 0 (no overlap) to 1 (perfect overlap).

    Notes
    -----
    This is a real metric; the binary images can be supplied in any order.
    """
    result = numpy.atleast_1d(result.astype(bool))
    reference = numpy.atleast_1d(reference.astype(bool))

    intersection = numpy.count_nonzero(result & reference)
    union = numpy.count_nonzero(result | reference)

    return float(intersection) / (float(union) + 1e-6)


def precision(result, reference):
    """
    Precision: TP / (TP + FP), the fraction of retrieved instances that are
    relevant. Not symmetric; the inverse is :func:`recall`.

    Parameters
    ----------
    result : array_like
        Converted to binary: background where 0, object everywhere else.
    reference : array_like
        Converted to binary as above.

    Returns
    -------
    precision : float

    References
    ----------
    .. [1] http://en.wikipedia.org/wiki/Precision_and_recall
    .. [2] http://en.wikipedia.org/wiki/Confusion_matrix#Table_of_confusion
    """
    result = numpy.atleast_1d(result.astype(bool))
    reference = numpy.atleast_1d(reference.astype(bool))

    tp = numpy.count_nonzero(result & reference)
    fp = numpy.count_nonzero(result & ~reference)

    # +1e-6 guards the denominator; division cannot raise
    return tp / (float(tp + fp) + 1e-6)


def recall(result, reference):
    """
    Recall: TP / (TP + FN), the fraction of relevant instances that are
    retrieved. Not symmetric; the inverse is :func:`precision`.

    Parameters
    ----------
    result : array_like
        Converted to binary: background where 0, object everywhere else.
    reference : array_like
        Converted to binary as above.

    Returns
    -------
    recall : float

    References
    ----------
    .. [1] http://en.wikipedia.org/wiki/Precision_and_recall
    .. [2] http://en.wikipedia.org/wiki/Confusion_matrix#Table_of_confusion
    """
    result = numpy.atleast_1d(result.astype(bool))
    reference = numpy.atleast_1d(reference.astype(bool))

    tp = numpy.count_nonzero(result & reference)
    fn = numpy.count_nonzero(~result & reference)

    return tp / (float(tp + fn) + 1e-6)


def sensitivity(result, reference):
    """
    Sensitivity. Same as :func:`recall`, see there for details.

    See also
    --------
    :func:`specificity`
    """
    return recall(result, reference)


def specificity(result, reference):
    """
    Specificity: TN / (TN + FP), the fraction of correctly returned
    negatives. Not symmetric; the complement is :func:`sensitivity`.

    Parameters
    ----------
    result : array_like
        Converted to binary: background where 0, object everywhere else.
    reference : array_like
        Converted to binary as above.

    Returns
    -------
    specificity : float

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Sensitivity_and_specificity
    """
    result = numpy.atleast_1d(result.astype(bool))
    reference = numpy.atleast_1d(reference.astype(bool))

    tn = numpy.count_nonzero(~result & ~reference)
    fp = numpy.count_nonzero(result & ~reference)

    return tn / (float(tn + fp) + 1e-6)


def F1(result, reference):
    """F1 score: harmonic mean of precision and sensitivity."""
    PC = precision(result, reference)
    SE = sensitivity(result, reference)
    # +1e-6 guards the denominator; division cannot raise
    return 2 * (PC * SE) / (PC + SE + 1e-6)


def ACC(result, reference):
    """Accuracy: (TP + TN) / (TP + TN + FP + FN); 0.0 for empty inputs."""
    result = numpy.atleast_1d(result.astype(bool))
    reference = numpy.atleast_1d(reference.astype(bool))

    tp = numpy.count_nonzero(result & reference)
    tn = numpy.count_nonzero(~result & ~reference)
    fp = numpy.count_nonzero(result & ~reference)
    fn = numpy.count_nonzero(~result & reference)

    try:
        return (tp + tn) / (tp + tn + fp + fn)
    except ZeroDivisionError:  # all four counts are zero for empty inputs
        return 0.0


def true_negative_rate(result, reference):
    """
    True negative rate. Same as :func:`specificity`, see there for details.

    See also
    --------
    :func:`true_positive_rate`
    :func:`positive_predictive_value`
    """
    return specificity(result, reference)


def true_positive_rate(result, reference):
    """
    True positive rate. Same as :func:`recall` / :func:`sensitivity`.

    See also
    --------
    :func:`positive_predictive_value`
    :func:`true_negative_rate`
    """
    return recall(result, reference)


def positive_predictive_value(result, reference):
    """
    Positive predictive value. Same as :func:`precision`.

    See also
    --------
    :func:`true_positive_rate`
    :func:`true_negative_rate`
    """
    return precision(result, reference)


def hd(result, reference, voxelspacing=None, connectivity=1):
    """
    (Symmetric) Hausdorff Distance between the binary objects in two images:
    the maximum surface distance between the objects.

    Parameters
    ----------
    result : array_like
        Converted to binary: background where 0, object everywhere else.
    reference : array_like
        Converted to binary as above.
    voxelspacing : float or sequence of floats, optional
        Spacing of elements along each dimension; a single number applies to
        all axes, ``None`` implies unit spacing.
    connectivity : int
        Neighbourhood used when determining object surfaces; passed to
        ``generate_binary_structure``. Influences the Hausdorff result.

    Returns
    -------
    hd : float
        Symmetric Hausdorff Distance, in the units of ``voxelspacing``
        (usually mm).

    Notes
    -----
    Real metric: inputs can be supplied in any order. Relies on
    ``__surface_distances`` defined elsewhere in this module.
    """
    hd1 = __surface_distances(result, reference, voxelspacing, connectivity).max()
    hd2 = __surface_distances(reference, result, voxelspacing, connectivity).max()
    return max(hd1, hd2)


def hd95(result, reference, voxelspacing=None, connectivity=1):
    """
    95th percentile of the (symmetric) Hausdorff Distance. Slightly more
    stable to small outliers than :func:`hd`; common in biomedical
    segmentation challenges.

    Parameters are as for :func:`hd`.

    Returns
    -------
    hd95 : float
        95th-percentile symmetric Hausdorff Distance, in the units of
        ``voxelspacing`` (usually mm).

    Notes
    -----
    Real metric: inputs can be supplied in any order.
    """
    hd1 = __surface_distances(result, reference, voxelspacing, connectivity)
    hd2 = __surface_distances(reference, result, voxelspacing, connectivity)
    return numpy.percentile(numpy.hstack((hd1, hd2)), 95)


def assd(result, reference, voxelspacing=None, connectivity=1):
    """
    Average symmetric surface distance (ASSD) between the binary objects in
    two images: the mean of ``asd(result, reference)`` and
    ``asd(reference, result)``.

    Parameters are as for :func:`hd`.

    Returns
    -------
    assd : float
        Average symmetric surface distance, in the units of ``voxelspacing``
        (usually mm).

    See also
    --------
    :func:`asd`
    :func:`hd`

    Notes
    -----
    Real metric: inputs can be supplied in any order.
    """
    return numpy.mean(
        (asd(result, reference, voxelspacing, connectivity), asd(reference, result, voxelspacing, connectivity)))


def asd(result, reference, voxelspacing=None, connectivity=1):
    """
    Average surface distance (ASD) between the binary objects in two images.

    Parameters are as for :func:`hd`. The ``connectivity`` decides which
    pixels/voxels count as an object's surface and can influence the result
    strongly; if in doubt, leave it as it is.

    Returns
    -------
    asd : float
        Average surface distance from ``result``'s surface to
        ``reference``'s, in the units of ``voxelspacing`` (usually mm).

    See also
    --------
    :func:`assd`
    :func:`hd`

    Notes
    -----
    This is NOT a real metric -- it is directed; see :func:`assd` for the
    symmetric version. Implemented with distance images and binary
    morphology for speed.
    """
    sds = __surface_distances(result, reference, voxelspacing, connectivity)
    return sds.mean()


def ravd(result, reference):
    """
    Relative absolute volume difference between the (joined) binary objects
    in two images.

    Parameters
    ----------
    result : array_like
        Converted to binary: background where 0, object everywhere else.
    reference : array_like
        Converted to binary as above.

    Returns
    -------
    ravd : float
        ``(|result| - |reference|) / |reference|``, in :math:`[-1, +inf]`;
        0 is the ideal score.

    Raises
    ------
    RuntimeError
        If the reference object is empty.

    Notes
    -----
    Directed (not a real metric): negative values denote a smaller and
    positive values a larger volume than the reference. Array sizes are not
    checked. A perfect score of 0 only means equal volumes, not equal shapes.

    Examples
    --------
    >>> import numpy
    >>> arr1 = numpy.asarray([[0,1,0],[1,1,1],[0,1,0]])  # 5 voxels
    >>> arr2 = numpy.asarray([[0,1,0],[1,0,1],[0,1,0]])  # 4 voxels
    >>> ravd(arr1, arr2)
    0.25
    >>> ravd(arr2, arr1)
    -0.2

    (The original docstring showed these two results swapped, contradicting
    the implementation.)
    """
    result = numpy.atleast_1d(result.astype(bool))
    reference = numpy.atleast_1d(reference.astype(bool))

    vol1 = numpy.count_nonzero(result)
    vol2 = numpy.count_nonzero(reference)

    if 0 == vol2:
        raise RuntimeError('The second supplied array does not contain any binary object.')

    return (vol1 - vol2) / float(vol2)
Each array_like will be 679 | converted into binary: background where 0, object everywhere else. 680 | references : sequence of array_like 681 | Ordered list of input data containing objects. Each array_like will be 682 | converted into binary: background where 0, object everywhere else. 683 | The order must be the same as for ``results``. 684 | 685 | Returns 686 | ------- 687 | r : float 688 | The correlation coefficient between -1 and 1. 689 | p : float 690 | The two-side p value. 691 | 692 | """ 693 | results = numpy.atleast_2d(numpy.array(results).astype(numpy.bool)) 694 | references = numpy.atleast_2d(numpy.array(references).astype(numpy.bool)) 695 | 696 | results_volumes = [numpy.count_nonzero(r) for r in results] 697 | references_volumes = [numpy.count_nonzero(r) for r in references] 698 | 699 | return pearsonr(results_volumes, references_volumes) # returns (Pearson' 700 | 701 | 702 | def volume_change_correlation(results, references): 703 | r""" 704 | Volume change correlation. 705 | 706 | Computes the linear correlation of change in binary object volume between 707 | the contents of the successive binary images supplied. Measured through 708 | the Pearson product-moment correlation coefficient. 709 | 710 | Parameters 711 | ---------- 712 | results : sequence of array_like 713 | Ordered list of input data containing objects. Each array_like will be 714 | converted into binary: background where 0, object everywhere else. 715 | references : sequence of array_like 716 | Ordered list of input data containing objects. Each array_like will be 717 | converted into binary: background where 0, object everywhere else. 718 | The order must be the same as for ``results``. 719 | 720 | Returns 721 | ------- 722 | r : float 723 | The correlation coefficient between -1 and 1. 724 | p : float 725 | The two-side p value. 
726 | 727 | """ 728 | results = numpy.atleast_2d(numpy.array(results).astype(numpy.bool)) 729 | references = numpy.atleast_2d(numpy.array(references).astype(numpy.bool)) 730 | 731 | results_volumes = numpy.asarray([numpy.count_nonzero(r) for r in results]) 732 | references_volumes = numpy.asarray([numpy.count_nonzero(r) for r in references]) 733 | 734 | results_volumes_changes = results_volumes[1:] - results_volumes[:-1] 735 | references_volumes_changes = references_volumes[1:] - references_volumes[:-1] 736 | 737 | return pearsonr(results_volumes_changes, 738 | references_volumes_changes) # returns (Pearson's correlation coefficient, 2-tailed p-value) 739 | 740 | 741 | def obj_assd(result, reference, voxelspacing=None, connectivity=1): 742 | """ 743 | Average symmetric surface distance. 744 | 745 | Computes the average symmetric surface distance (ASSD) between the binary objects in 746 | two images. 747 | 748 | Parameters 749 | ---------- 750 | result : array_like 751 | Input data containing objects. Can be any type but will be converted 752 | into binary: background where 0, object everywhere else. 753 | reference : array_like 754 | Input data containing objects. Can be any type but will be converted 755 | into binary: background where 0, object everywhere else. 756 | voxelspacing : float or sequence of floats, optional 757 | The voxelspacing in a distance unit i.e. spacing of elements 758 | along each dimension. If a sequence, must be of length equal to 759 | the input rank; if a single number, this is used for all axes. If 760 | not specified, a grid spacing of unity is implied. 761 | connectivity : int 762 | The neighbourhood/connectivity considered when determining what accounts 763 | for a distinct binary object as well as when determining the surface 764 | of the binary objects. This value is passed to 765 | `scipy.ndimage.morphology.generate_binary_structure` and should usually be :math:`> 1`. 
766 | The decision on the connectivity is important, as it can influence the results 767 | strongly. If in doubt, leave it as it is. 768 | 769 | Returns 770 | ------- 771 | assd : float 772 | The average symmetric surface distance between all mutually existing distinct 773 | binary object(s) in ``result`` and ``reference``. The distance unit is the same as for 774 | the spacing of elements along each dimension, which is usually given in mm. 775 | 776 | See also 777 | -------- 778 | :func:`obj_asd` 779 | 780 | Notes 781 | ----- 782 | This is a real metric, obtained by calling and averaging 783 | 784 | >>> obj_asd(result, reference) 785 | 786 | and 787 | 788 | >>> obj_asd(reference, result) 789 | 790 | The binary images can therefore be supplied in any order. 791 | """ 792 | assd = numpy.mean((obj_asd(result, reference, voxelspacing, connectivity), 793 | obj_asd(reference, result, voxelspacing, connectivity))) 794 | return assd 795 | 796 | 797 | def obj_asd(result, reference, voxelspacing=None, connectivity=1): 798 | """ 799 | Average surface distance between objects. 800 | 801 | First correspondences between distinct binary objects in reference and result are 802 | established. Then the average surface distance is only computed between corresponding 803 | objects. Correspondence is defined as unique and at least one voxel overlap. 804 | 805 | Parameters 806 | ---------- 807 | result : array_like 808 | Input data containing objects. Can be any type but will be converted 809 | into binary: background where 0, object everywhere else. 810 | reference : array_like 811 | Input data containing objects. Can be any type but will be converted 812 | into binary: background where 0, object everywhere else. 813 | voxelspacing : float or sequence of floats, optional 814 | The voxelspacing in a distance unit i.e. spacing of elements 815 | along each dimension. If a sequence, must be of length equal to 816 | the input rank; if a single number, this is used for all axes. 
If 817 | not specified, a grid spacing of unity is implied. 818 | connectivity : int 819 | The neighbourhood/connectivity considered when determining what accounts 820 | for a distinct binary object as well as when determining the surface 821 | of the binary objects. This value is passed to 822 | `scipy.ndimage.morphology.generate_binary_structure` and should usually be :math:`> 1`. 823 | The decision on the connectivity is important, as it can influence the results 824 | strongly. If in doubt, leave it as it is. 825 | 826 | Returns 827 | ------- 828 | asd : float 829 | The average surface distance between all mutually existing distinct binary 830 | object(s) in ``result`` and ``reference``. The distance unit is the same as for the 831 | spacing of elements along each dimension, which is usually given in mm. 832 | 833 | See also 834 | -------- 835 | :func:`obj_assd` 836 | :func:`obj_tpr` 837 | :func:`obj_fpr` 838 | 839 | Notes 840 | ----- 841 | This is not a real metric, as it is directed. See `obj_assd` for a real metric of this. 842 | 843 | For the understanding of this metric, both the notions of connectedness and surface 844 | distance are essential. Please see :func:`obj_tpr` and :func:`obj_fpr` for more 845 | information on the first and :func:`asd` on the second. 
846 | 847 | Examples 848 | -------- 849 | >>> arr1 = numpy.asarray([[1,1,1],[1,1,1],[1,1,1]]) 850 | >>> arr2 = numpy.asarray([[0,1,0],[0,1,0],[0,1,0]]) 851 | >>> arr1 852 | array([[1, 1, 1], 853 | [1, 1, 1], 854 | [1, 1, 1]]) 855 | >>> arr2 856 | array([[0, 1, 0], 857 | [0, 1, 0], 858 | [0, 1, 0]]) 859 | >>> obj_asd(arr1, arr2) 860 | 1.5 861 | >>> obj_asd(arr2, arr1) 862 | 0.333333333333 863 | 864 | With the `voxelspacing` parameter, the distances between the voxels can be set for 865 | each dimension separately: 866 | 867 | >>> obj_asd(arr1, arr2, voxelspacing=(1,2)) 868 | 1.5 869 | >>> obj_asd(arr2, arr1, voxelspacing=(1,2)) 870 | 0.333333333333 871 | 872 | More examples depicting the notion of object connectedness: 873 | 874 | >>> arr1 = numpy.asarray([[1,0,1],[1,0,0],[0,0,0]]) 875 | >>> arr2 = numpy.asarray([[1,0,1],[1,0,0],[0,0,1]]) 876 | >>> arr1 877 | array([[1, 0, 1], 878 | [1, 0, 0], 879 | [0, 0, 0]]) 880 | >>> arr2 881 | array([[1, 0, 1], 882 | [1, 0, 0], 883 | [0, 0, 1]]) 884 | >>> obj_asd(arr1, arr2) 885 | 0.0 886 | >>> obj_asd(arr2, arr1) 887 | 0.0 888 | 889 | >>> arr1 = numpy.asarray([[1,0,1],[1,0,1],[0,0,1]]) 890 | >>> arr2 = numpy.asarray([[1,0,1],[1,0,0],[0,0,1]]) 891 | >>> arr1 892 | array([[1, 0, 1], 893 | [1, 0, 1], 894 | [0, 0, 1]]) 895 | >>> arr2 896 | array([[1, 0, 1], 897 | [1, 0, 0], 898 | [0, 0, 1]]) 899 | >>> obj_asd(arr1, arr2) 900 | 0.6 901 | >>> obj_asd(arr2, arr1) 902 | 0.0 903 | 904 | Influence of `connectivity` parameter can be seen in the following example, where 905 | with the (default) connectivity of `1` the first array is considered to contain two 906 | objects, while with an increase connectivity of `2`, just one large object is 907 | detected. 
908 | 909 | >>> arr1 = numpy.asarray([[1,0,0],[0,1,1],[0,1,1]]) 910 | >>> arr2 = numpy.asarray([[1,0,0],[0,0,0],[0,0,0]]) 911 | >>> arr1 912 | array([[1, 0, 0], 913 | [0, 1, 1], 914 | [0, 1, 1]]) 915 | >>> arr2 916 | array([[1, 0, 0], 917 | [0, 0, 0], 918 | [0, 0, 0]]) 919 | >>> obj_asd(arr1, arr2) 920 | 0.0 921 | >>> obj_asd(arr1, arr2, connectivity=2) 922 | 1.742955328 923 | 924 | Note that the connectivity also influence the notion of what is considered an object 925 | surface voxels. 926 | """ 927 | sds = list() 928 | labelmap1, labelmap2, _a, _b, mapping = __distinct_binary_object_correspondences(result, reference, connectivity) 929 | slicers1 = find_objects(labelmap1) 930 | slicers2 = find_objects(labelmap2) 931 | for lid2, lid1 in list(mapping.items()): 932 | window = __combine_windows(slicers1[lid1 - 1], slicers2[lid2 - 1]) 933 | object1 = labelmap1[window] == lid1 934 | object2 = labelmap2[window] == lid2 935 | sds.extend(__surface_distances(object1, object2, voxelspacing, connectivity)) 936 | asd = numpy.mean(sds) 937 | return asd 938 | 939 | 940 | def obj_fpr(result, reference, connectivity=1): 941 | """ 942 | The false positive rate of distinct binary object detection. 943 | 944 | The false positive rates gives a percentage measure of how many distinct binary 945 | objects in the second array do not exists in the first array. A partial overlap 946 | (of minimum one voxel) is here considered sufficient. 947 | 948 | In cases where two distinct binary object in the second array overlap with a single 949 | distinct object in the first array, only one is considered to have been detected 950 | successfully and the other is added to the count of false positives. 951 | 952 | Parameters 953 | ---------- 954 | result : array_like 955 | Input data containing objects. Can be any type but will be converted 956 | into binary: background where 0, object everywhere else. 957 | reference : array_like 958 | Input data containing objects. 
Can be any type but will be converted 959 | into binary: background where 0, object everywhere else. 960 | connectivity : int 961 | The neighbourhood/connectivity considered when determining what accounts 962 | for a distinct binary object. This value is passed to 963 | `scipy.ndimage.morphology.generate_binary_structure` and should usually be :math:`> 1`. 964 | The decision on the connectivity is important, as it can influence the results 965 | strongly. If in doubt, leave it as it is. 966 | 967 | Returns 968 | ------- 969 | tpr : float 970 | A percentage measure of how many distinct binary objects in ``results`` have no 971 | corresponding binary object in ``reference``. It has the range :math:`[0, 1]`, where a :math:`0` 972 | denotes an ideal score. 973 | 974 | Raises 975 | ------ 976 | RuntimeError 977 | If the second array is empty. 978 | 979 | See also 980 | -------- 981 | :func:`obj_tpr` 982 | 983 | Notes 984 | ----- 985 | This is not a real metric, as it is directed. Whatever array is considered as 986 | reference should be passed second. A perfect score of :math:`0` tells that there are no 987 | distinct binary objects in the second array that do not exists also in the reference 988 | array, but does not reveal anything about objects in the reference array also 989 | existing in the second array (use :func:`obj_tpr` for this). 
990 | 991 | Examples 992 | -------- 993 | >>> arr2 = numpy.asarray([[1,0,0],[1,0,1],[0,0,1]]) 994 | >>> arr1 = numpy.asarray([[0,0,1],[1,0,1],[0,0,1]]) 995 | >>> arr2 996 | array([[1, 0, 0], 997 | [1, 0, 1], 998 | [0, 0, 1]]) 999 | >>> arr1 1000 | array([[0, 0, 1], 1001 | [1, 0, 1], 1002 | [0, 0, 1]]) 1003 | >>> obj_fpr(arr1, arr2) 1004 | 0.0 1005 | >>> obj_fpr(arr2, arr1) 1006 | 0.0 1007 | 1008 | Example of directedness: 1009 | 1010 | >>> arr2 = numpy.asarray([1,0,1,0,1]) 1011 | >>> arr1 = numpy.asarray([1,0,1,0,0]) 1012 | >>> obj_fpr(arr1, arr2) 1013 | 0.0 1014 | >>> obj_fpr(arr2, arr1) 1015 | 0.3333333333333333 1016 | 1017 | Examples of multiple overlap treatment: 1018 | 1019 | >>> arr2 = numpy.asarray([1,0,1,0,1,1,1]) 1020 | >>> arr1 = numpy.asarray([1,1,1,0,1,0,1]) 1021 | >>> obj_fpr(arr1, arr2) 1022 | 0.3333333333333333 1023 | >>> obj_fpr(arr2, arr1) 1024 | 0.3333333333333333 1025 | 1026 | >>> arr2 = numpy.asarray([1,0,1,1,1,0,1]) 1027 | >>> arr1 = numpy.asarray([1,1,1,0,1,1,1]) 1028 | >>> obj_fpr(arr1, arr2) 1029 | 0.0 1030 | >>> obj_fpr(arr2, arr1) 1031 | 0.3333333333333333 1032 | 1033 | >>> arr2 = numpy.asarray([[1,0,1,0,0], 1034 | [1,0,0,0,0], 1035 | [1,0,1,1,1], 1036 | [0,0,0,0,0], 1037 | [1,0,1,0,0]]) 1038 | >>> arr1 = numpy.asarray([[1,1,1,0,0], 1039 | [0,0,0,0,0], 1040 | [1,1,1,0,1], 1041 | [0,0,0,0,0], 1042 | [1,1,1,0,0]]) 1043 | >>> obj_fpr(arr1, arr2) 1044 | 0.0 1045 | >>> obj_fpr(arr2, arr1) 1046 | 0.2 1047 | """ 1048 | _, _, _, n_obj_reference, mapping = __distinct_binary_object_correspondences(reference, result, connectivity) 1049 | return (n_obj_reference - len(mapping)) / float(n_obj_reference) 1050 | 1051 | 1052 | def obj_tpr(result, reference, connectivity=1): 1053 | """ 1054 | The true positive rate of distinct binary object detection. 1055 | 1056 | The true positive rates gives a percentage measure of how many distinct binary 1057 | objects in the first array also exists in the second array. 
A partial overlap 1058 | (of minimum one voxel) is here considered sufficient. 1059 | 1060 | In cases where two distinct binary object in the first array overlaps with a single 1061 | distinct object in the second array, only one is considered to have been detected 1062 | successfully. 1063 | 1064 | Parameters 1065 | ---------- 1066 | result : array_like 1067 | Input data containing objects. Can be any type but will be converted 1068 | into binary: background where 0, object everywhere else. 1069 | reference : array_like 1070 | Input data containing objects. Can be any type but will be converted 1071 | into binary: background where 0, object everywhere else. 1072 | connectivity : int 1073 | The neighbourhood/connectivity considered when determining what accounts 1074 | for a distinct binary object. This value is passed to 1075 | `scipy.ndimage.morphology.generate_binary_structure` and should usually be :math:`> 1`. 1076 | The decision on the connectivity is important, as it can influence the results 1077 | strongly. If in doubt, leave it as it is. 1078 | 1079 | Returns 1080 | ------- 1081 | tpr : float 1082 | A percentage measure of how many distinct binary objects in ``result`` also exists 1083 | in ``reference``. It has the range :math:`[0, 1]`, where a :math:`1` denotes an ideal score. 1084 | 1085 | Raises 1086 | ------ 1087 | RuntimeError 1088 | If the reference object is empty. 1089 | 1090 | See also 1091 | -------- 1092 | :func:`obj_fpr` 1093 | 1094 | Notes 1095 | ----- 1096 | This is not a real metric, as it is directed. Whatever array is considered as 1097 | reference should be passed second. A perfect score of :math:`1` tells that all distinct 1098 | binary objects in the reference array also exist in the result array, but does not 1099 | reveal anything about additional binary objects in the result array 1100 | (use :func:`obj_fpr` for this). 
1101 | 1102 | Examples 1103 | -------- 1104 | >>> arr2 = numpy.asarray([[1,0,0],[1,0,1],[0,0,1]]) 1105 | >>> arr1 = numpy.asarray([[0,0,1],[1,0,1],[0,0,1]]) 1106 | >>> arr2 1107 | array([[1, 0, 0], 1108 | [1, 0, 1], 1109 | [0, 0, 1]]) 1110 | >>> arr1 1111 | array([[0, 0, 1], 1112 | [1, 0, 1], 1113 | [0, 0, 1]]) 1114 | >>> obj_tpr(arr1, arr2) 1115 | 1.0 1116 | >>> obj_tpr(arr2, arr1) 1117 | 1.0 1118 | 1119 | Example of directedness: 1120 | 1121 | >>> arr2 = numpy.asarray([1,0,1,0,1]) 1122 | >>> arr1 = numpy.asarray([1,0,1,0,0]) 1123 | >>> obj_tpr(arr1, arr2) 1124 | 0.6666666666666666 1125 | >>> obj_tpr(arr2, arr1) 1126 | 1.0 1127 | 1128 | Examples of multiple overlap treatment: 1129 | 1130 | >>> arr2 = numpy.asarray([1,0,1,0,1,1,1]) 1131 | >>> arr1 = numpy.asarray([1,1,1,0,1,0,1]) 1132 | >>> obj_tpr(arr1, arr2) 1133 | 0.6666666666666666 1134 | >>> obj_tpr(arr2, arr1) 1135 | 0.6666666666666666 1136 | 1137 | >>> arr2 = numpy.asarray([1,0,1,1,1,0,1]) 1138 | >>> arr1 = numpy.asarray([1,1,1,0,1,1,1]) 1139 | >>> obj_tpr(arr1, arr2) 1140 | 0.6666666666666666 1141 | >>> obj_tpr(arr2, arr1) 1142 | 1.0 1143 | 1144 | >>> arr2 = numpy.asarray([[1,0,1,0,0], 1145 | [1,0,0,0,0], 1146 | [1,0,1,1,1], 1147 | [0,0,0,0,0], 1148 | [1,0,1,0,0]]) 1149 | >>> arr1 = numpy.asarray([[1,1,1,0,0], 1150 | [0,0,0,0,0], 1151 | [1,1,1,0,1], 1152 | [0,0,0,0,0], 1153 | [1,1,1,0,0]]) 1154 | >>> obj_tpr(arr1, arr2) 1155 | 0.8 1156 | >>> obj_tpr(arr2, arr1) 1157 | 1.0 1158 | """ 1159 | _, _, n_obj_result, _, mapping = __distinct_binary_object_correspondences(reference, result, connectivity) 1160 | return len(mapping) / float(n_obj_result) 1161 | 1162 | 1163 | def __distinct_binary_object_correspondences(reference, result, connectivity=1): 1164 | """ 1165 | Determines all distinct (where connectivity is defined by the connectivity parameter 1166 | passed to scipy's `generate_binary_structure`) binary objects in both of the input 1167 | parameters and returns a 1to1 mapping from the labelled objects in 
reference to the 1168 | corresponding (whereas a one-voxel overlap suffices for correspondence) objects in 1169 | result. 1170 | 1171 | All stems from the problem, that the relationship is non-surjective many-to-many. 1172 | 1173 | @return (labelmap1, labelmap2, n_lables1, n_labels2, labelmapping2to1) 1174 | """ 1175 | result = numpy.atleast_1d(result.astype(numpy.bool)) 1176 | reference = numpy.atleast_1d(reference.astype(numpy.bool)) 1177 | 1178 | # binary structure 1179 | footprint = generate_binary_structure(result.ndim, connectivity) 1180 | 1181 | # label distinct binary objects 1182 | labelmap1, n_obj_result = label(result, footprint) 1183 | labelmap2, n_obj_reference = label(reference, footprint) 1184 | 1185 | # find all overlaps from labelmap2 to labelmap1; collect one-to-one relationships and store all one-two-many for later processing 1186 | slicers = find_objects(labelmap2) # get windows of labelled objects 1187 | mapping = dict() # mappings from labels in labelmap2 to corresponding object labels in labelmap1 1188 | used_labels = set() # set to collect all already used labels from labelmap2 1189 | one_to_many = list() # list to collect all one-to-many mappings 1190 | for l1id, slicer in enumerate(slicers): # iterate over object in labelmap2 and their windows 1191 | l1id += 1 # labelled objects have ids sarting from 1 1192 | bobj = (l1id) == labelmap2[slicer] # find binary object corresponding to the label1 id in the segmentation 1193 | l2ids = numpy.unique(labelmap1[slicer][ 1194 | bobj]) # extract all unique object identifiers at the corresponding positions in the reference (i.e. 
the mapping) 1195 | l2ids = l2ids[0 != l2ids] # remove background identifiers (=0) 1196 | if 1 == len( 1197 | l2ids): # one-to-one mapping: if target label not already used, add to final list of object-to-object mappings and mark target label as used 1198 | l2id = l2ids[0] 1199 | if not l2id in used_labels: 1200 | mapping[l1id] = l2id 1201 | used_labels.add(l2id) 1202 | elif 1 < len(l2ids): # one-to-many mapping: store relationship for later processing 1203 | one_to_many.append((l1id, set(l2ids))) 1204 | 1205 | # process one-to-many mappings, always choosing the one with the least labelmap2 correspondences first 1206 | while True: 1207 | one_to_many = [(l1id, l2ids - used_labels) for l1id, l2ids in 1208 | one_to_many] # remove already used ids from all sets 1209 | one_to_many = [x for x in one_to_many if x[1]] # remove empty sets 1210 | one_to_many = sorted(one_to_many, key=lambda x: len(x[1])) # sort by set length 1211 | if 0 == len(one_to_many): 1212 | break 1213 | l2id = one_to_many[0][1].pop() # select an arbitrary target label id from the shortest set 1214 | mapping[one_to_many[0][0]] = l2id # add to one-to-one mappings 1215 | used_labels.add(l2id) # mark target label as used 1216 | one_to_many = one_to_many[1:] # delete the processed set from all sets 1217 | 1218 | return labelmap1, labelmap2, n_obj_result, n_obj_reference, mapping 1219 | 1220 | 1221 | def __surface_distances(result, reference, voxelspacing=None, connectivity=1): 1222 | """ 1223 | The distances between the surface voxel of binary objects in result and their 1224 | nearest partner surface voxel of a binary object in reference. 
1225 | """ 1226 | result = numpy.atleast_1d(result.astype(numpy.bool)) 1227 | reference = numpy.atleast_1d(reference.astype(numpy.bool)) 1228 | if voxelspacing is not None: 1229 | voxelspacing = _ni_support._normalize_sequence(voxelspacing, result.ndim) 1230 | voxelspacing = numpy.asarray(voxelspacing, dtype=numpy.float64) 1231 | if not voxelspacing.flags.contiguous: 1232 | voxelspacing = voxelspacing.copy() 1233 | 1234 | # binary structure 1235 | footprint = generate_binary_structure(result.ndim, connectivity) 1236 | 1237 | # test for emptiness 1238 | if 0 == numpy.count_nonzero(result): 1239 | raise RuntimeError('The first supplied array does not contain any binary object.') 1240 | if 0 == numpy.count_nonzero(reference): 1241 | raise RuntimeError('The second supplied array does not contain any binary object.') 1242 | 1243 | # extract only 1-pixel border line of objects 1244 | result_border = result ^ binary_erosion(result, structure=footprint, iterations=1) 1245 | reference_border = reference ^ binary_erosion(reference, structure=footprint, iterations=1) 1246 | 1247 | # compute average surface distance 1248 | # Note: scipys distance transform is calculated only inside the borders of the 1249 | # foreground objects, therefore the input has to be reversed 1250 | dt = distance_transform_edt(~reference_border, sampling=voxelspacing) 1251 | sds = dt[result_border] 1252 | 1253 | return sds 1254 | 1255 | 1256 | def __combine_windows(w1, w2): 1257 | """ 1258 | Joins two windows (defined by tuple of slices) such that their maximum 1259 | combined extend is covered by the new returned window. 
1260 | """ 1261 | res = [] 1262 | for s1, s2 in zip(w1, w2): 1263 | res.append(slice(min(s1.start, s2.start), max(s1.stop, s2.stop))) 1264 | return tuple(res) 1265 | -------------------------------------------------------------------------------- /utils/dice_loss.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn.functional as F 3 | from torch.nn.modules.loss import _Loss 4 | 5 | 6 | def soft_dice_loss(prediction, soft_ground_truth, num_class, weight_map=None): 7 | predict = prediction.permute(0, 2, 3, 1) 8 | pred = predict.contiguous().view(-1, num_class) 9 | # pred = F.softmax(pred, dim=1) 10 | ground = soft_ground_truth.view(-1, num_class) 11 | n_voxels = ground.size(0) 12 | if weight_map is not None: 13 | weight_map = weight_map.view(-1) 14 | weight_map_nclass = weight_map.repeat(num_class).view_as(pred) 15 | ref_vol = torch.sum(weight_map_nclass * ground, 0) 16 | intersect = torch.sum(weight_map_nclass * ground * pred, 0) 17 | seg_vol = torch.sum(weight_map_nclass * pred, 0) 18 | else: 19 | ref_vol = torch.sum(ground, 0) 20 | intersect = torch.sum(ground * pred, 0) 21 | seg_vol = torch.sum(pred, 0) 22 | dice_score = (2.0 * intersect + 1e-5) / (ref_vol + seg_vol + 1.0 + 1e-5) 23 | # dice_loss = 1.0 - torch.mean(dice_score) 24 | # return dice_loss 25 | dice_loss = -torch.log(dice_score) 26 | dice_loss_ave = torch.mean(dice_loss) 27 | dice_score_lesion = dice_loss[1] 28 | return dice_loss_ave, dice_score_lesion 29 | 30 | 31 | class SoftDiceLoss(_Loss): 32 | ''' 33 | Soft_Dice = 2*|dot(A, B)| / (|dot(A, A)| + |dot(B, B)| + eps) 34 | eps is a small constant to avoid zero division, 35 | ''' 36 | 37 | def __init__(self, *args, **kwargs): 38 | super(SoftDiceLoss, self).__init__() 39 | 40 | # def forward(self, prediction, soft_ground_truth, num_class=2, weight_map=None, eps=1e-8) 41 | def forward(self, prediction, soft_ground_truth, num_class=3, weight_map=None, eps=1e-8): 42 | dice_loss_ave, 
dice_score_lesion = soft_dice_loss(prediction, soft_ground_truth, num_class, weight_map) 43 | return dice_loss_ave, dice_score_lesion 44 | 45 | 46 | def get_soft_label(input_tensor, num_class): 47 | """ 48 | convert a label tensor to soft label 49 | input_tensor: tensor with shape [N, C, H, W] 50 | output_tensor: shape [N, H, W, num_class] 51 | """ 52 | tensor_list = [] 53 | input_tensor = input_tensor.permute(0, 2, 3, 1) 54 | for i in range(num_class): 55 | temp_prob = torch.eq(input_tensor, i * torch.ones_like(input_tensor)) 56 | tensor_list.append(temp_prob) 57 | output_tensor = torch.cat(tensor_list, dim=-1) 58 | output_tensor = output_tensor.float() 59 | return output_tensor 60 | 61 | 62 | def val_dice_isic(prediction, soft_ground_truth, num_class): 63 | # predict = prediction.permute(0, 2, 3, 1) 64 | # pred = prediction.contiguous().view(-1, num_class) 65 | pred = prediction.view(-1, num_class) 66 | # pred = F.softmax(pred, dim=1) 67 | ground = soft_ground_truth.view(-1, num_class) 68 | ref_vol = torch.sum(ground, 0) 69 | intersect = torch.sum(ground * pred, 0) 70 | seg_vol = torch.sum(pred, 0) 71 | # dice_score = 2.0 * intersect / (ref_vol + seg_vol + 1.0) 72 | dice_score = 2.0 * intersect / (ref_vol + seg_vol + 1e-6) 73 | # dice_mean_score = torch.mean(dice_score) 74 | 75 | return dice_score 76 | 77 | 78 | def Intersection_over_Union_isic(prediction, soft_ground_truth, num_class): 79 | # predict = prediction.permute(0, 2, 3, 1) 80 | pred = prediction.contiguous().view(-1, num_class) 81 | # pred = F.softmax(pred, dim=1) 82 | ground = soft_ground_truth.view(-1, num_class) 83 | ref_vol = torch.sum(ground, 0) 84 | intersect = torch.sum(ground * pred, 0) 85 | seg_vol = torch.sum(pred, 0) 86 | iou_score = intersect / (ref_vol + seg_vol - intersect + 1.0) 87 | iou_mean_score = torch.mean(iou_score) 88 | 89 | return iou_mean_score 90 | -------------------------------------------------------------------------------- /utils/dice_loss_github.py: 
def softmax_helper(x):
    """Numerically stable softmax over the channel dimension (dim 1).

    Adapted from nnUNet.  The per-channel max is subtracted before
    exponentiation so large logits cannot overflow.  Broadcasting makes
    the original explicit .repeat() calls unnecessary — same result,
    no intermediate copies.
    """
    x_max = x.max(1, keepdim=True)[0]
    e_x = torch.exp(x - x_max)
    return e_x / e_x.sum(1, keepdim=True)


def sum_tensor(inp, axes, keepdim=False):
    """Sum `inp` over every axis in `axes` (adapted from nnUNet).

    Axes are deduplicated; without keepdim they are reduced from the
    highest axis downward so earlier reductions do not shift the
    indices of later ones.
    """
    axes = np.unique(axes).astype(int)
    if keepdim:
        for ax in axes:
            inp = inp.sum(int(ax), keepdim=True)
    else:
        for ax in sorted(axes, reverse=True):
            inp = inp.sum(int(ax))
    return inp


def get_tp_fp_fn(net_output, gt, axes=None, mask=None, square=False):
    """Soft true-positive / false-positive / false-negative counts.

    net_output must be (b, c, x, y(, z)).
    gt may be a label map of shape (b, 1, x, y(, z)) or (b, x, y(, z)),
    or an already one-hot tensor of the same shape as net_output.
    :param axes: axes to reduce over; defaults to all spatial axes
    :param mask: optional (b, 1, ...) mask, 1 = valid pixel, 0 = invalid
    :param square: if True, square tp/fp/fn element-wise before summation
    :return: (tp, fp, fn), each reduced over `axes`
    """
    if axes is None:
        axes = tuple(range(2, len(net_output.size())))

    shp_x = net_output.shape
    shp_y = gt.shape

    with torch.no_grad():
        if len(shp_x) != len(shp_y):
            # label map lacks a channel axis -> insert one
            gt = gt.view((shp_y[0], 1, *shp_y[1:]))

        if all(i == j for i, j in zip(net_output.shape, gt.shape)):
            # shapes match -> gt is probably already one-hot encoded
            y_onehot = gt
        else:
            gt = gt.long()
            # allocate directly on net_output's device instead of building
            # on the CPU and conditionally copying to CUDA afterwards
            y_onehot = torch.zeros(shp_x, device=net_output.device)
            y_onehot.scatter_(1, gt, 1)

    tp = net_output * y_onehot
    fp = net_output * (1 - y_onehot)
    fn = (1 - net_output) * y_onehot

    if mask is not None:
        # zero out invalid pixels channel by channel
        tp = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(tp, dim=1)), dim=1)
        fp = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(fp, dim=1)), dim=1)
        fn = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(fn, dim=1)), dim=1)

    if square:
        tp = tp ** 2
        fp = fp ** 2
        fn = fn ** 2

    tp = sum_tensor(tp, axes, keepdim=False)
    fp = sum_tensor(fp, axes, keepdim=False)
    fn = sum_tensor(fn, axes, keepdim=False)

    return tp, fp, fn


class SoftDiceLoss_git(nn.Module):
    def __init__(self, apply_nonlin=None, batch_dice=True, dc_log=True, do_bg=True, smooth=1.,
                 square=False):
        """
        Soft Dice loss (paper: https://arxiv.org/pdf/1606.04797.pdf).

        :param apply_nonlin: optional nonlinearity applied to the network output
        :param batch_dice: treat the whole batch as one sample (also reduce dim 0)
        :param dc_log: return -log(dice) instead of -dice
        :param do_bg: include the background channel in the loss
        :param smooth: additive smoothing term in numerator and denominator
        :param square: square tp/fp/fn before summation
        """
        super(SoftDiceLoss_git, self).__init__()

        self.square = square
        self.do_bg = do_bg
        self.dc_log = dc_log
        self.batch_dice = batch_dice
        self.apply_nonlin = apply_nonlin
        self.smooth = smooth

    def forward(self, x, y, loss_mask=None):
        shp_x = x.shape

        if self.batch_dice:
            axes = [0] + list(range(2, len(shp_x)))
        else:
            axes = list(range(2, len(shp_x)))

        if self.apply_nonlin is not None:
            x = self.apply_nonlin(x)

        tp, fp, fn = get_tp_fp_fn(x, y, axes, loss_mask, self.square)

        dc = (2 * tp + self.smooth) / (2 * tp + fp + fn + self.smooth)

        if not self.do_bg:
            # drop the background channel before averaging
            if self.batch_dice:
                dc = dc[1:]
            else:
                dc = dc[:, 1:]
        if self.dc_log:
            dc = -torch.log(dc)
            dc = dc.mean()
        else:
            dc = -dc.mean()
        return dc
class AverageMeter(object):
    """Running-average tracker: latest value, sum, count and mean."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Zero every statistic."""
        self.val, self.avg, self.sum, self.count = 0, 0, 0, 0

    def update(self, val, n=1):
        """Fold in `val` observed `n` times and refresh the mean."""
        self.val = val
        self.sum += n * val
        self.count += n
        self.avg = self.sum / self.count


# initalize the module
def init_weights(net, init_type='normal'):
    """Apply a named initialisation scheme to every sub-module of `net`.

    Only 'kaiming' is implemented; any other name (including the
    default 'normal') raises NotImplementedError, matching the original
    behaviour of this utility.
    """
    if init_type != 'kaiming':
        raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
    net.apply(weights_init_kaiming)


def weights_init_kaiming(m):
    """Kaiming-normal init for Conv/Linear weights; N(1, 0.02) weight and
    zero bias for BatchNorm layers.  Other module types are untouched."""
    cls_name = m.__class__.__name__
    if 'Conv' in cls_name or 'Linear' in cls_name:
        init.kaiming_normal_(m.weight.data, a=0, mode='fan_in', nonlinearity='relu')
    elif 'BatchNorm' in cls_name:
        init.normal_(m.weight.data, 1.0, 0.02)
        init.constant_(m.bias.data, 0.0)


# compute model params
def count_param(model):
    """Return the total number of scalar parameters in `model`."""
    return sum(p.view(-1).size()[0] for p in model.parameters())
def ISIC2018_transform_newdata(sample, train_type):
    """Convert a {'image', 'label'} sample of arrays to normalised tensors.

    Training samples get random flips/rotation first; validation/test
    samples are converted as-is (no crop/resize in this variant).
    """
    image = Image.fromarray(np.uint8(sample['image']), mode='RGB')
    label = Image.fromarray(np.uint8(sample['label']), mode='L')

    if train_type == 'train':
        image, label = randomflip_rotate(image, label, p=0.5, degrees=30)

    to_tensor_norm = ts.Compose([ts.ToTensor(),
                                 ts.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5))])
    return {'image': to_tensor_norm(image), 'label': ts.ToTensor()(label)}


# these are founctional function for transform
def randomflip_rotate(img, lab, p=0.5, degrees=0):
    """Apply identical random horizontal/vertical flips (each with
    probability `p`) and a random rotation to an image/mask pair."""
    if random.random() < p:
        img, lab = TF.hflip(img), TF.hflip(lab)
    if random.random() < p:
        img, lab = TF.vflip(img), TF.vflip(lab)

    if isinstance(degrees, numbers.Number):
        if degrees < 0:
            raise ValueError("If degrees is a single number, it must be positive.")
        span = (-degrees, degrees)
    else:
        if len(degrees) != 2:
            raise ValueError("If degrees is a sequence, it must be of len 2.")
        span = degrees
    angle = random.uniform(span[0], span[1])
    return TF.rotate(img, angle), TF.rotate(lab, angle)


class randomcrop(object):
    """Crop a PIL image and its mask at the same random location.

    Args:
        size (sequence or int): target crop size; an int means a square
            (size, size) crop.
        padding (int or sequence, optional): padding added to the borders
            before cropping (left/top/right/bottom for a 4-sequence).
            Default is 0, i.e. no padding.
        pad_if_needed (boolean): pad inputs smaller than the requested
            size instead of raising an exception.
    """

    def __init__(self, size, padding=0, pad_if_needed=False):
        self.size = (int(size), int(size)) if isinstance(size, numbers.Number) else size
        self.padding = padding
        self.pad_if_needed = pad_if_needed

    @staticmethod
    def get_params(img, output_size):
        """Pick a random (i, j, th, tw) crop window inside `img`.

        Returns (0, 0, h, w) when the image already has the target size.
        """
        w, h = img.size
        th, tw = output_size
        if w == tw and h == th:
            return 0, 0, h, w
        return random.randint(0, h - th), random.randint(0, w - tw), th, tw

    def __call__(self, img, lab):
        """Return the identically cropped (image, mask) pair."""
        if self.padding > 0:
            img, lab = TF.pad(img, self.padding), TF.pad(lab, self.padding)

        # pad the width, then the height, if the source is too small
        if self.pad_if_needed and img.size[0] < self.size[1]:
            img = TF.pad(img, (int((1 + self.size[1] - img.size[0]) / 2), 0))
            lab = TF.pad(lab, (int((1 + self.size[1] - lab.size[0]) / 2), 0))
        if self.pad_if_needed and img.size[1] < self.size[0]:
            img = TF.pad(img, (0, int((1 + self.size[0] - img.size[1]) / 2)))
            lab = TF.pad(lab, (0, int((1 + self.size[0] - lab.size[1]) / 2)))

        i, j, h, w = self.get_params(img, self.size)
        return TF.crop(img, i, j, h, w), TF.crop(lab, i, j, h, w)

    def __repr__(self):
        return self.__class__.__name__ + '(size={0}, padding={1})'.format(self.size, self.padding)
def itensity_normalize(volume):
    """
    Zero-mean / unit-std normalise an nd volume; voxels that were exactly
    zero are replaced with N(0, 1) noise so the background does not stay
    constant after normalisation.
    inputs:
        volume: the input nd volume
    outputs:
        out: the normalized nd volume
    """
    mean = volume.mean()
    std = volume.std()
    out = (volume - mean) / std
    noise = np.random.normal(0, 1, size=volume.shape)
    zero_mask = volume == 0
    out[zero_mask] = noise[zero_mask]
    return out


device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')


def parse_args():
    """Command-line interface: only the model/run name is configurable."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--name', default='ISIC2018_UNet_woDS',
                        help='model name')
    return parser.parse_args()


def main():
    """Evaluate a saved EIU-Net checkpoint on the test split.

    Loads the run's config.yml and weights from models_EIUNet/<name>/,
    computes segmentation metrics over the test loader, and writes
    binarised predictions as heatmap/<name>/<class>/<img_id>.jpg.
    """
    args = parse_args()

    # training-time configuration saved next to the checkpoint
    with open('models_EIUNet/%s/config.yml' % args.name, 'r') as f:
        config = yaml.load(f, Loader=yaml.FullLoader)

    print('-' * 20)
    for key in config.keys():
        print('%s: %s' % (key, str(config[key])))
    print('-' * 20)

    cudnn.benchmark = True

    print("=> creating model %s" % config['arch'])
    model = network.__dict__[config['arch']](config['input_channels'], config['num_classes'])
    model = model.to(device)

    def add_img(path):
        # each line of a .list file is "<img_id>.jpg\n"; strip the
        # 5-character ".jpg\n" suffix to recover the bare id
        img_ids_s = []
        with open(path, 'r') as handle:
            for line in handle.readlines():
                img_ids_s.append(line[:-5])
        return img_ids_s

    test_path = 'test.list'
    test_img_ids = add_img(test_path)

    model.load_state_dict(torch.load('models_EIUNet/%s/test_model_1.pth' %
                                     config['name']))
    model.eval()

    test_transform = Compose([
        Resize(config['input_h'], config['input_w']),
        transforms.Normalize(),
    ])

    test_dataset = Dataset(
        img_ids=test_img_ids,
        img_dir=os.path.join(""),
        mask_dir=os.path.join(""),
        img_ext=config['img_ext'],
        mask_ext=config['mask_ext'],
        num_classes=config['num_classes'],
        transform=test_transform)
    test_loader = DataLoader(
        test_dataset,
        batch_size=1,
        shuffle=False,
        num_workers=config['num_workers'],
        drop_last=False,
        pin_memory=True)

    # one running-average meter per reported metric
    jc_avg_meter = AverageMeter()
    dice_avg_meter = AverageMeter()
    se_avg_meter = AverageMeter()
    sp_avg_meter = AverageMeter()
    acc_avg_meter = AverageMeter()
    precision_avg_meter = AverageMeter()
    recall_avg_meter = AverageMeter()

    count = 0  # kept for parity with the original script (never read)

    for c in range(config['num_classes']):
        os.makedirs(os.path.join('heatmap', config['name'], str(c)), exist_ok=True)

    with torch.no_grad():
        for input, target, meta in tqdm(test_loader, total=len(test_dataset)):
            input = input.to(device)
            target = target.to(device)

            output = model(input)

            jc = jaccard(output, target)
            dice = dice_co(output, target)
            se = sensitivity(output, target)
            sp = specificity(output, target)
            acc = accuracy(output, target)
            pre = precision(output, target)
            rec = recall(output, target)

            batch = input.size(0)
            jc_avg_meter.update(jc, batch)
            dice_avg_meter.update(dice, batch)
            se_avg_meter.update(se, batch)
            sp_avg_meter.update(sp, batch)
            acc_avg_meter.update(acc, batch)
            precision_avg_meter.update(pre, batch)
            recall_avg_meter.update(rec, batch)

            # binarise the sigmoid probabilities at 0.5
            output = torch.sigmoid(output).cpu().numpy()
            output[output >= 0.5] = 1
            output[output < 0.5] = 0

            for i in range(len(output)):
                for c in range(config['num_classes']):
                    cv2.imwrite(os.path.join('heatmap', config['name'], str(c), meta['img_id'][i] + '.jpg'),
                                (output[i, c] * 255).astype('uint8'))

    print('jc: %.4f ' % jc_avg_meter.avg)
    print('Dice: %.4f ' % dice_avg_meter.avg)
    print('se: %.4f ' % se_avg_meter.avg)
    print('sp: %.4f ' % sp_avg_meter.avg)
    print('acc: %.4f ' % acc_avg_meter.avg)

    torch.cuda.empty_cache()


if __name__ == '__main__':
    main()