├── README.md ├── data ├── 5_train_supervised.txt ├── 5_train_unsupervised.txt ├── WHU-CD-List │ ├── 10_train_supervised.txt │ ├── 10_train_unsupervised.txt │ ├── 20_train_supervised.txt │ ├── 20_train_unsupervised.txt │ ├── 40_train_supervised.txt │ ├── 40_train_unsupervised.txt │ ├── 5_train_supervised.txt │ ├── 5_train_unsupervised.txt │ └── WHU-CD.png ├── __pycache__ │ ├── semi_data.cpython-38.pyc │ └── transform.cpython-38.pyc ├── generate_semicd_splits.py ├── semi_data.py ├── test.txt ├── train.txt ├── transform.py └── val.txt ├── inference.py ├── main.py └── models ├── Change_Detection.py ├── __pycache__ ├── Change_Detection.cpython-38.pyc ├── base_model.cpython-38.pyc ├── decoders.cpython-38.pyc └── encoder.cpython-38.pyc ├── backbones ├── __init__.py ├── __pycache__ │ ├── __init__.cpython-37.pyc │ ├── __init__.cpython-38.pyc │ ├── __init__.cpython-39.pyc │ ├── module_helper.cpython-37.pyc │ ├── module_helper.cpython-38.pyc │ ├── module_helper.cpython-39.pyc │ ├── resnet_backbone.cpython-37.pyc │ ├── resnet_backbone.cpython-38.pyc │ ├── resnet_backbone.cpython-39.pyc │ ├── resnet_models.cpython-37.pyc │ ├── resnet_models.cpython-38.pyc │ └── resnet_models.cpython-39.pyc ├── get_pretrained_model.sh ├── module_helper.py ├── resnet_backbone.py └── resnet_models.py ├── base_model.py ├── decoders.py └── encoder.py /README.md: -------------------------------------------------------------------------------- 1 | # RC-Change-Detection 2 | This is the official PyTorch implementation of our TGRS 2022 paper: 3 | 4 | 《Reliable Contrastive Learning for Semi-supervised Change Detection in Remote Sensing Images》 5 | 6 | Jia-Xin Wang, Teng Li∗, Si-Bao Chen, Jin Tang, Bin Luo and Richard C. Wilson 7 | -------------------------------------------------------------------------------- /data/5_train_supervised.txt: -------------------------------------------------------------------------------- 1 | train_369_10.png 2 | train_151_6.png 3 | train_43_10.png 4 | train_314_9.png 5 | train_126_2.png 6 | train_120_1.png 7 | train_183_11.png 8 | train_72_3.png 9 | train_99_4.png 10 | train_347_1.png 11 | train_84_4.png 12 | train_441_9.png 13 | train_3_8.png 14 | train_259_5.png 15 | train_35_4.png 16 | train_165_1.png 17 | train_10_2.png 18 | train_97_8.png 19 | train_145_14.png 20 | train_133_10.png 21 | train_142_3.png 22 | train_197_13.png 23 | train_340_8.png 24 | train_350_7.png 25 | train_90_3.png 26 | train_208_11.png 27 | train_308_10.png 28 | train_280_5.png 29 | train_430_16.png 30 | train_281_6.png 31 | train_146_6.png 32 | train_16_7.png 33 | train_67_14.png 34 | train_395_10.png 35 | train_393_12.png 36 | train_119_15.png 37 | train_9_12.png 38 | train_146_11.png 39 | train_113_3.png 40 | train_223_5.png 41 | train_250_9.png 42 | train_270_16.png 43 | train_412_13.png 44 | train_214_2.png 45 | train_32_1.png 46 | train_340_12.png 47 | train_240_2.png 48 | train_403_5.png 49 | train_124_6.png 50 | train_309_8.png 51 | train_281_15.png 52 | train_125_7.png 53 | train_314_7.png 54 | train_366_14.png 55 | train_49_12.png 56 | train_214_11.png 57 | train_196_10.png 58 | train_204_10.png 59 | train_36_11.png 60 | train_152_7.png 61 | train_186_5.png 62 | train_414_2.png 63 | train_272_16.png 64 | train_118_10.png 65 | train_86_16.png 66 | train_38_7.png 67 | train_123_13.png 68 | train_96_10.png 69 | train_194_7.png 70 | train_150_3.png 71 | train_238_12.png 72 | train_430_4.png 73 | train_181_12.png 74 | train_273_12.png 75 | train_386_4.png 76 | train_415_15.png 77 | train_372_7.png 78 | train_222_11.png 79 | train_297_11.png 80 | train_49_15.png 81 | train_405_1.png 82 | train_182_15.png 83 | train_342_15.png 84 | train_154_10.png 85 | train_371_5.png 86 | train_369_8.png 87 | train_314_12.png 88 | train_6_15.png 89 | train_110_11.png 90 | train_214_7.png 91 | train_414_6.png 92 | train_376_11.png 93 | train_243_6.png 94 | train_351_4.png 95 | train_194_14.png 96 | train_241_12.png 97 | train_296_9.png 98 | train_142_11.png 99 | train_342_2.png 100 | train_349_16.png 101 | train_5_12.png 102 | train_441_6.png 103 | train_367_15.png 104 | train_33_7.png 105 | train_90_10.png 106 | train_102_8.png 107 | train_407_11.png 108 | train_210_7.png 109 | train_83_7.png 110 | train_398_12.png 111 | train_110_1.png 112 | train_292_4.png 113 | train_411_9.png 114 | train_60_9.png 115 | train_210_4.png 116 | train_353_7.png 117 | train_274_7.png 118 | train_72_4.png 119 | train_154_8.png 120 | train_265_14.png 121 | train_260_1.png 122 | train_244_1.png 123 | train_249_16.png 124 | train_271_3.png 125 | train_131_1.png 126 | train_355_16.png 127 | train_299_10.png 128 | train_90_12.png 129 | train_367_5.png 130 | train_102_12.png 131 | train_422_10.png 132 | train_278_10.png 133 | train_228_7.png 134 | train_357_16.png 135 | train_402_12.png 136 | train_102_7.png 137 | train_366_15.png 138 | train_190_5.png 139 | train_12_11.png 140 | train_254_9.png 141 | train_76_15.png 142 | train_245_5.png 143 | train_250_13.png 144 | train_49_6.png 145 | train_431_8.png 146 | train_92_14.png 147 | train_52_13.png 148 | train_318_16.png 149 | train_408_2.png 150 | train_126_5.png 151 | train_312_2.png 152 | train_360_4.png 153 | train_427_6.png 154 | train_38_9.png 155 | train_59_10.png 156 | train_69_16.png 157 | train_68_5.png 158 | train_388_6.png 159 | train_199_13.png 160 | train_309_15.png 161 | train_294_5.png 162 | train_82_4.png 163 | train_379_14.png 164 | train_186_6.png 165 | train_438_14.png 166 | train_148_5.png 167 | train_127_7.png 168 | train_111_4.png 169 | train_10_15.png 170 | train_356_5.png 171 | train_38_8.png 172 | train_64_6.png 173 | train_126_12.png 174 | train_367_10.png 175 | train_105_1.png 176 | train_4_7.png 177 | train_219_7.png 178 | train_11_4.png 179 | train_305_13.png 180 | train_56_14.png 181 | train_75_16.png 182 | train_365_14.png 183 | train_193_14.png 184 | train_177_11.png 185 | train_240_8.png 186 | train_394_3.png 187 | train_11_2.png 188 | train_405_10.png 189 | train_278_7.png 190 | train_338_8.png 191 | train_50_13.png 192 | train_67_6.png 193 | train_116_12.png 194 | train_275_9.png 195 | train_253_3.png 196 | train_113_13.png 197 | train_436_5.png 198 | train_7_1.png 199 | train_56_4.png 200 | train_422_7.png 201 | train_334_3.png 202 | train_245_4.png 203 | train_439_8.png 204 | train_124_5.png 205 | train_366_8.png 206 | train_83_11.png 207 | train_373_5.png 208 | train_243_1.png 209 | train_47_6.png 210 | train_117_12.png 211 | train_319_13.png 212 | train_134_11.png 213 | train_224_11.png 214 | train_200_14.png 215 | train_244_10.png 216 | train_98_2.png 217 | train_40_1.png 218 | train_289_4.png 219 | train_53_6.png 220 | train_339_14.png 221 | train_364_11.png 222 | train_175_15.png 223 | train_442_9.png 224 | train_75_10.png 225 | train_61_9.png 226 | train_310_13.png 227 | train_213_5.png 228 | train_168_10.png 229 | train_226_2.png 230 | train_84_7.png 231 | train_269_16.png 232 | train_185_10.png 233 | train_365_8.png 234 | train_276_4.png 235 | train_376_8.png 236 | train_216_1.png 237 | train_13_6.png 238 | train_412_9.png 239 | train_149_8.png 240 | train_318_8.png 241 | train_186_16.png 242 | train_341_6.png 243 | train_216_13.png 244 | train_385_4.png 245 | train_63_15.png 246 | train_424_7.png 247 | train_237_2.png 248 | train_270_15.png 249 | train_34_6.png 250 | train_145_9.png 251 | train_132_12.png 252 | train_150_5.png 253 | train_373_6.png 254 | train_316_5.png 255 | train_157_2.png 256 | train_153_6.png 257 | train_362_7.png 258 | train_299_11.png 259 | train_2_15.png 260 | train_86_5.png 261 | train_360_15.png 262 | train_290_7.png 263 | train_18_16.png 264 | train_22_3.png 265 | train_383_7.png 266 | train_235_1.png 267 | train_319_1.png 268 | train_99_14.png 269 | train_267_3.png 270 | train_381_5.png 271 | train_323_16.png 272 | train_203_16.png 273 | train_130_7.png 274 | train_376_13.png 275 | train_208_3.png 276 | train_431_2.png 277 | train_141_13.png 278 | train_439_9.png 279 | train_440_8.png 280 | train_200_4.png 281 | train_241_9.png 282 | train_71_9.png 283 | train_185_14.png 284 | train_163_7.png 285 | train_333_2.png 286 | train_220_9.png 287 | train_10_8.png 288 | train_260_7.png 289 | train_49_9.png 290 | train_275_3.png 291 | train_396_8.png 292 | train_204_5.png 293 | train_183_4.png 294 | train_229_16.png 295 | train_48_6.png 296 | train_275_12.png 297 | train_87_1.png 298 | train_393_5.png 299 | train_40_14.png 300 | train_154_15.png 301 | train_294_8.png 302 | train_295_9.png 303 | train_379_9.png 304 | train_411_8.png 305 | train_319_2.png 306 | train_277_15.png 307 | train_112_10.png 308 | train_358_2.png 309 | train_141_11.png 310 | train_10_14.png 311 | train_112_13.png 312 | train_192_16.png 313 | train_443_7.png 314 | train_312_11.png 315 | train_62_9.png 316 | train_197_9.png 317 | train_20_14.png 318 | train_152_3.png 319 | train_69_11.png 320 | train_155_8.png 321 | train_21_7.png 322 | train_413_4.png 323 | train_389_2.png 324 | train_270_13.png 325 | train_19_9.png 326 | train_73_8.png 327 | train_252_9.png 328 | train_277_7.png 329 | train_239_15.png 330 | train_443_15.png 331 | train_415_10.png 332 | train_132_11.png 333 | train_302_1.png 334 | train_164_3.png 335 | train_383_9.png 336 | train_207_13.png 337 | train_420_15.png 338 | train_40_8.png 339 | train_250_7.png 340 | train_76_9.png 341 | train_156_6.png 342 | train_155_12.png 343 | train_278_2.png 344 | train_376_7.png 345 | train_8_3.png 346 | train_363_3.png 347 | train_1_8.png 348 | train_236_15.png 349 | train_290_6.png 350 | train_88_11.png 351 | train_427_12.png 352 | train_413_12.png 353 | train_435_6.png 354 | train_178_12.png 355 | train_153_1.png 356 | train_20_9.png 357 | -------------------------------------------------------------------------------- /data/WHU-CD-List/10_train_supervised.txt: -------------------------------------------------------------------------------- 1 | whucd_02547.png 2 | whucd_05868.png 3 | whucd_00871.png 4 | whucd_07001.png 5 | whucd_04331.png 6 | whucd_04228.png 7 | whucd_06246.png 8 | whucd_00630.png 9 | whucd_04199.png 10 | whucd_05344.png 11 | whucd_03872.png 12 | whucd_04598.png 13 | whucd_07147.png 14 | whucd_03283.png 15 | whucd_01681.png 16 | whucd_02180.png 17 | whucd_02309.png 18 | whucd_01313.png 19 | whucd_01438.png 20 | whucd_03711.png 21 | whucd_02864.png 22 | whucd_05053.png 23 | whucd_00738.png 24 | whucd_01140.png 25 | whucd_05244.png 26 | whucd_02540.png 27 | whucd_07290.png 28 | whucd_06515.png 29 | whucd_06065.png 30 | whucd_04359.png 31 | whucd_00076.png 32 | whucd_03974.png 33 | whucd_03150.png 34 | whucd_05995.png 35 | whucd_05258.png 36 | whucd_03620.png 37 | whucd_01722.png 38 | whucd_05123.png 39 | whucd_02224.png 40 | whucd_00142.png 41 | whucd_07335.png 42 | whucd_06469.png 43 | whucd_01092.png 44 | whucd_06397.png 45 | whucd_00361.png 46 | whucd_03534.png 47 | whucd_06881.png 48 | whucd_03846.png 49 | whucd_07276.png 50 | whucd_02978.png 51 | whucd_00574.png 52 | whucd_03907.png 53 | whucd_01850.png 54 | whucd_04125.png 55 | whucd_02663.png 56 | whucd_06647.png 57 | whucd_02711.png 58 | whucd_02480.png 59 | whucd_05992.png 60 | whucd_06413.png 61 | whucd_03505.png 62 | whucd_00443.png 63 | whucd_07392.png 64 | whucd_04111.png 65 | whucd_03340.png 66 | whucd_07039.png 67 | whucd_06895.png 68 | whucd_04094.png 69 | whucd_03466.png 70 | whucd_03867.png 71 | whucd_01065.png 72 | whucd_00694.png 73 | whucd_03042.png 74 | whucd_05891.png 75 | whucd_03103.png 76 | whucd_00187.png 77 | whucd_06271.png 78 | whucd_02566.png 79 | whucd_06682.png 80 | whucd_04553.png 81 | whucd_04126.png 82 | whucd_00818.png 83 | whucd_04479.png 84 | whucd_00948.png 85 | whucd_01101.png 86 | whucd_07326.png 87 | whucd_02800.png 88 | whucd_00784.png 89 | whucd_07322.png 90 | whucd_04233.png 91 | whucd_06380.png 92 | whucd_01088.png 93 | whucd_01911.png 94 | whucd_07040.png 95 | whucd_03986.png 96 | whucd_04130.png 97 | whucd_05489.png 98 | whucd_00064.png 99 | whucd_01300.png 100 | whucd_06266.png 101 | whucd_03801.png 102 | whucd_00838.png 103 | whucd_03725.png 104 | whucd_05310.png 105 | whucd_01945.png 106 | whucd_04170.png 107 | whucd_00564.png 108 | whucd_00524.png 109 | whucd_02138.png 110 | whucd_03104.png 111 | whucd_06500.png 112 | whucd_03979.png 113 | whucd_06504.png 114 | whucd_03279.png 115 | whucd_04868.png 116 | whucd_00437.png 117 | whucd_03851.png 118 | whucd_04112.png 119 | whucd_04117.png 120 | whucd_04369.png 121 | whucd_04493.png 122 | whucd_03927.png 123 | whucd_07146.png 124 | whucd_07412.png 125 | whucd_03043.png 126 | whucd_03153.png 127 | whucd_05861.png 128 | whucd_04121.png 129 | whucd_03748.png 130 | whucd_06903.png 131 | whucd_00867.png 132 | whucd_03114.png 133 | whucd_02524.png 134 | whucd_06333.png 135 | whucd_01093.png 136 | whucd_06556.png 137 | whucd_05107.png 138 | whucd_05441.png 139 | whucd_04863.png 140 | whucd_00840.png 141 | whucd_02985.png 142 | whucd_04329.png 143 | whucd_04727.png 144 | whucd_07167.png 145 | whucd_03425.png 146 | whucd_05620.png 147 | whucd_00109.png 148 | whucd_00392.png 149 | whucd_03238.png 150 | whucd_04724.png 151 | whucd_06467.png 152 | -------------------------------------------------------------------------------- /data/WHU-CD-List/10_train_unsupervised.txt: -------------------------------------------------------------------------------- 1 | whucd_02903.png 2 | whucd_02251.png 3 | whucd_06225.png 4 | whucd_04473.png 5 | whucd_00198.png 6 | whucd_04096.png 7 | whucd_06788.png 8 | whucd_00483.png 9 | whucd_03792.png 10 | whucd_05110.png 11 | whucd_03791.png 12 | whucd_03117.png 13 | whucd_01864.png 14 | whucd_03155.png 15 | whucd_05365.png 16 | whucd_03837.png 17 | whucd_04927.png 18 | whucd_04226.png 19 | whucd_03964.png 20 | whucd_06316.png 21 | whucd_02230.png 22 | whucd_07402.png 23 | whucd_04980.png 24 | whucd_05217.png 25 | whucd_00949.png 26 | whucd_04859.png 27 | whucd_03976.png 28 | whucd_03843.png 29 | whucd_05874.png 30 | whucd_00235.png 31 | whucd_06020.png 32 | whucd_01298.png 33 | whucd_06394.png 34 | whucd_03180.png 35 | whucd_03316.png 36 | whucd_05218.png 37 | whucd_00231.png 38 | whucd_03289.png 39 | whucd_00482.png 40 | whucd_00571.png 41 | whucd_04993.png 42 | whucd_02987.png 43 | whucd_03593.png 44 | whucd_03866.png 45 | whucd_03713.png 46 | whucd_00190.png 47 | whucd_04738.png 48 | whucd_04983.png 49 | whucd_00141.png 50 | whucd_03424.png 51 | whucd_06189.png 52 | whucd_03720.png 53 | whucd_07149.png 54 | whucd_01450.png 55 | whucd_04485.png 56 | whucd_04723.png 57 | whucd_04851.png 58 | whucd_04618.png 59 | whucd_05433.png 60 | whucd_04206.png 61 | whucd_01217.png 62 | whucd_06307.png 63 | whucd_07142.png 64 | whucd_06890.png 65 | whucd_07152.png 66 | whucd_01079.png 67 | whucd_07321.png 68 | whucd_01165.png 69 | whucd_05723.png 70 | whucd_04310.png 71 | whucd_01506.png 72 | whucd_03284.png 73 | whucd_05305.png 74 | whucd_05553.png 75 | whucd_04091.png 76 | whucd_05248.png 77 | whucd_01324.png 78 | whucd_03999.png 79 | whucd_06272.png 80 | whucd_03530.png 81 | whucd_00227.png 82 | whucd_03579.png 83 | whucd_03493.png 84 | whucd_06875.png 85 | whucd_01164.png 86 | whucd_06912.png 87 | whucd_01912.png 88 | whucd_05128.png 89 | whucd_06910.png 90 | whucd_05234.png 91 | whucd_02354.png 92 | whucd_04691.png 93 | whucd_06254.png 94 | whucd_04216.png 95 | whucd_05133.png 96 | whucd_06666.png 97 | whucd_01861.png 98 | whucd_01431.png 99 | whucd_06268.png 100 | whucd_04849.png 101 | whucd_01127.png 102 | whucd_03112.png 103 | whucd_05001.png 104 | whucd_04619.png 105 | whucd_07394.png 106 | whucd_03475.png 107 | whucd_03657.png 108 | whucd_05122.png 109 | whucd_01421.png 110 | whucd_06045.png 111 | whucd_02018.png 112 | whucd_06576.png 113 | whucd_05362.png 114 | whucd_04611.png 115 | whucd_06400.png 116 | whucd_05494.png 117 | whucd_00429.png 118 | whucd_01509.png 119 | whucd_05314.png 120 | whucd_05831.png 121 | whucd_06148.png 122 | whucd_01380.png 123 | whucd_05369.png 124 | whucd_06922.png 125 | whucd_01204.png 126 | whucd_07137.png 127 | whucd_05237.png 128 | whucd_07269.png 129 | whucd_04994.png 130 | whucd_01798.png 131 | whucd_03712.png 132 | whucd_06021.png 133 | whucd_03677.png 134 | whucd_07017.png 135 | whucd_05000.png 136 | whucd_06376.png 137 | whucd_06315.png 138 | whucd_00422.png 139 | whucd_00412.png 140 | whucd_02036.png 141 | whucd_05623.png 142 | whucd_04614.png 143 | whucd_04045.png 144 | whucd_02872.png 145 | whucd_06243.png 146 | whucd_05239.png 147 | whucd_07041.png 148 | whucd_02983.png 149 | whucd_06382.png 150 | whucd_06770.png 151 | whucd_05367.png 152 | whucd_05619.png 153 | whucd_03538.png 154 | whucd_03848.png 155 | whucd_01598.png 156 | whucd_07006.png 157 | whucd_00812.png 158 | whucd_00942.png 159 | whucd_04854.png 160 | whucd_00128.png 161 | whucd_02884.png 162 | whucd_04753.png 163 | whucd_04987.png 164 | whucd_06880.png 165 | whucd_00150.png 166 | whucd_00171.png 167 | whucd_01119.png 168 | whucd_03474.png 169 | whucd_00068.png 170 | whucd_03915.png 171 | whucd_04348.png 172 | whucd_05894.png 173 | whucd_01091.png 174 | whucd_03914.png 175 | whucd_02730.png 176 | whucd_03789.png 177 | whucd_06643.png 178 | whucd_01209.png 179 | whucd_02523.png 180 | whucd_03966.png 181 | whucd_01436.png 182 | whucd_00382.png 183 | whucd_01019.png 184 | whucd_06095.png 185 | whucd_06116.png 186 | whucd_03189.png 187 | whucd_00153.png 188 | whucd_03663.png 189 | whucd_00359.png 190 | whucd_05429.png 191 | whucd_00677.png 192 | whucd_07267.png 193 | whucd_00339.png 194 | whucd_01865.png 195 | whucd_02990.png 196 | whucd_06629.png 197 | whucd_01041.png 198 | whucd_07194.png 199 | whucd_05499.png 200 | whucd_00297.png 201 | whucd_07074.png 202 | whucd_01186.png 203 | whucd_06115.png 204 | whucd_01923.png 205 | whucd_02319.png 206 | whucd_04313.png 207 | whucd_03110.png 208 | whucd_01572.png 209 | whucd_00191.png 210 | whucd_00201.png 211 | whucd_06652.png 212 | whucd_05862.png 213 | whucd_04988.png 214 | whucd_03163.png 215 | whucd_02796.png 216 | whucd_02948.png 217 | whucd_06383.png 218 | whucd_00744.png 219 | whucd_03659.png 220 | whucd_04985.png 221 | whucd_05700.png 222 | whucd_07002.png 223 | whucd_02409.png 224 | whucd_06785.png 225 | whucd_05376.png 226 | whucd_01062.png 227 | whucd_00950.png 228 | whucd_02102.png 229 | whucd_02857.png 230 | whucd_05366.png 231 | whucd_02382.png 232 | whucd_04874.png 233 | whucd_04218.png 234 | whucd_02378.png 235 | whucd_03183.png 236 | whucd_01180.png 237 | whucd_03469.png 238 | whucd_02598.png 239 | whucd_04247.png 240 | whucd_05108.png 241 | whucd_06323.png 242 | whucd_00051.png 243 | whucd_05054.png 244 | whucd_03972.png 245 | whucd_03901.png 246 | whucd_00691.png 247 | whucd_02135.png 248 | whucd_01997.png 249 | whucd_04599.png 250 | whucd_03597.png 251 | whucd_03788.png 252 | whucd_01117.png 253 | whucd_00549.png 254 | whucd_06401.png 255 | whucd_05372.png 256 | whucd_06480.png 257 | whucd_05375.png 258 | whucd_07128.png 259 | whucd_00627.png 260 | whucd_06891.png 261 | whucd_03963.png 262 | whucd_05390.png 263 | whucd_04100.png 264 | whucd_01009.png 265 | whucd_03622.png 266 | whucd_00353.png 267 | whucd_00638.png 268 | whucd_00572.png 269 | whucd_04996.png 270 | whucd_03965.png 271 | whucd_06711.png 272 | whucd_04223.png 273 | whucd_01188.png 274 | whucd_06322.png 275 | whucd_00203.png 276 | whucd_00391.png 277 | whucd_01310.png 278 | whucd_03552.png 279 | whucd_06790.png 280 | whucd_01169.png 281 | whucd_02829.png 282 | whucd_00420.png 283 | whucd_06143.png 284 | whucd_05748.png 285 | whucd_00062.png 286 | whucd_01032.png 287 | whucd_02143.png 288 | whucd_03827.png 289 | whucd_05036.png 290 | whucd_03115.png 291 | whucd_01039.png 292 | whucd_07159.png 293 | whucd_03074.png 294 | whucd_06538.png 295 | whucd_00347.png 296 | whucd_04602.png 297 | whucd_05706.png 298 | whucd_00517.png 299 | whucd_07161.png 300 | whucd_00502.png 301 | whucd_04612.png 302 | whucd_03403.png 303 | whucd_00393.png 304 | whucd_05580.png 305 | whucd_06717.png 306 | whucd_07305.png 307 | whucd_05242.png 308 | whucd_03406.png 309 | whucd_06451.png 310 | whucd_05897.png 311 | whucd_06876.png 312 | whucd_00163.png 313 | whucd_07213.png 314 | whucd_05111.png 315 | whucd_02383.png 316 | whucd_03955.png 317 | whucd_01218.png 318 | whucd_03632.png 319 | whucd_01034.png 320 | whucd_04468.png 321 | whucd_06372.png 322 | whucd_06718.png 323 | whucd_04234.png 324 | whucd_00924.png 325 | whucd_07022.png 326 | whucd_03844.png 327 | whucd_00879.png 328 | whucd_07287.png 329 | whucd_05115.png 330 | whucd_01178.png 331 | whucd_05468.png 332 | whucd_03621.png 333 | whucd_02012.png 334 | whucd_07143.png 335 | whucd_07196.png 336 | whucd_04115.png 337 | whucd_06917.png 338 | whucd_04053.png 339 | whucd_05496.png 340 | whucd_00085.png 341 | whucd_05443.png 342 | whucd_01849.png 343 | whucd_06501.png 344 | whucd_07162.png 345 | whucd_01867.png 346 | whucd_07389.png 347 | whucd_03728.png 348 | whucd_04243.png 349 | whucd_03154.png 350 | whucd_04439.png 351 | whucd_03660.png 352 | whucd_05990.png 353 | whucd_03588.png 354 | whucd_06319.png 355 | whucd_04975.png 356 | whucd_00566.png 357 | whucd_01290.png 358 | whucd_02071.png 359 | whucd_00910.png 360 | whucd_05698.png 361 | whucd_04347.png 362 | whucd_03994.png 363 | whucd_03067.png 364 | whucd_04998.png 365 | whucd_01060.png 366 | whucd_06341.png 367 | whucd_04484.png 368 | whucd_07043.png 369 | whucd_04728.png 370 | whucd_00887.png 371 | whucd_00110.png 372 | whucd_03124.png 373 | whucd_07199.png 374 | whucd_04593.png 375 | whucd_01430.png 376 | whucd_06502.png 377 | whucd_03839.png 378 | whucd_05485.png 379 | whucd_01343.png 380 | whucd_01976.png 381 | whucd_00657.png 382 | whucd_00587.png 383 | whucd_00965.png 384 | whucd_05124.png 385 | whucd_04865.png 386 | whucd_04328.png 387 | whucd_07003.png 388 | whucd_02586.png 389 | whucd_03710.png 390 | whucd_07036.png 391 | whucd_01219.png 392 | whucd_06559.png 393 | whucd_04248.png 394 | whucd_06787.png 395 | whucd_06591.png 396 | whucd_01159.png 397 | whucd_01547.png 398 | whucd_07069.png 399 | whucd_06824.png 400 | whucd_04839.png 401 | whucd_04565.png 402 | whucd_03585.png 403 | whucd_00751.png 404 | whucd_01220.png 405 | whucd_02837.png 406 | whucd_02379.png 407 | whucd_03116.png 408 | whucd_00195.png 409 | whucd_02320.png 410 | whucd_03803.png 411 | whucd_06792.png 412 | whucd_00743.png 413 | whucd_05445.png 414 | whucd_00346.png 415 | whucd_01672.png 416 | whucd_05860.png 417 | whucd_04039.png 418 | whucd_06882.png 419 | whucd_05166.png 420 | whucd_05686.png 421 | whucd_06911.png 422 | whucd_05660.png 423 | whucd_01190.png 424 | whucd_04991.png 425 | whucd_05869.png 426 | whucd_00335.png 427 | whucd_05235.png 428 | whucd_01296.png 429 | whucd_04494.png 430 | whucd_01383.png 431 | whucd_04221.png 432 | whucd_03960.png 433 | whucd_02883.png 434 | whucd_02861.png 435 | whucd_04474.png 436 | whucd_07275.png 437 | whucd_02350.png 438 | whucd_04342.png 439 | whucd_00539.png 440 | whucd_04220.png 441 | whucd_03666.png 442 | whucd_04999.png 443 | whucd_04951.png 444 | whucd_07075.png 445 | whucd_05738.png 446 | whucd_04106.png 447 | whucd_01293.png 448 | whucd_05109.png 449 | whucd_07048.png 450 | whucd_07289.png 451 | whucd_02677.png 452 | whucd_05259.png 453 | whucd_05487.png 454 | whucd_00022.png 455 | whucd_03551.png 456 | whucd_01924.png 457 | whucd_05689.png 458 | whucd_04027.png 459 | whucd_04989.png 460 | whucd_07417.png 461 | whucd_01471.png 462 | whucd_04811.png 463 | whucd_02852.png 464 | whucd_00332.png 465 | whucd_00413.png 466 | whucd_06403.png 467 | whucd_02881.png 468 | whucd_03902.png 469 | whucd_06867.png 470 | whucd_07265.png 471 | whucd_03918.png 472 | whucd_06789.png 473 | whucd_04626.png 474 | whucd_03041.png 475 | whucd_04977.png 476 | whucd_07140.png 477 | whucd_00771.png 478 | whucd_02137.png 479 | whucd_03460.png 480 | whucd_07386.png 481 | whucd_04847.png 482 | whucd_07390.png 483 | whucd_05704.png 484 | whucd_03111.png 485 | whucd_03314.png 486 | whucd_06624.png 487 | whucd_03740.png 488 | whucd_06255.png 489 | whucd_06913.png 490 | whucd_03674.png 491 | whucd_00256.png 492 | whucd_07148.png 493 | whucd_02441.png 494 | whucd_04873.png 495 | whucd_04966.png 496 | whucd_03912.png 497 | whucd_04245.png 498 | whucd_02923.png 499 | whucd_00309.png 500 | whucd_02665.png 501 | whucd_04469.png 502 | whucd_03476.png 503 | whucd_02128.png 504 | whucd_02989.png 505 | whucd_02038.png 506 | whucd_02984.png 507 | whucd_06499.png 508 | whucd_05066.png 509 | whucd_04850.png 510 | whucd_04476.png 511 | whucd_01038.png 512 | whucd_04309.png 513 | whucd_07255.png 514 | whucd_00503.png 515 | whucd_04840.png 516 | whucd_03926.png 517 | whucd_03113.png 518 | whucd_06665.png 519 | whucd_02567.png 520 | whucd_05240.png 521 | whucd_06655.png 522 | whucd_04752.png 523 | whucd_01306.png 524 | whucd_01562.png 525 | whucd_07393.png 526 | whucd_02396.png 527 | whucd_05491.png 528 | whucd_00819.png 529 | whucd_05130.png 530 | whucd_03990.png 531 | whucd_02622.png 532 | whucd_02106.png 533 | whucd_00586.png 534 | whucd_00029.png 535 | whucd_03706.png 536 | whucd_06784.png 537 | whucd_01295.png 538 | whucd_04735.png 539 | whucd_03723.png 540 | whucd_04487.png 541 | whucd_07266.png 542 | whucd_00333.png 543 | whucd_03969.png 544 | whucd_04001.png 545 | whucd_01181.png 546 | whucd_04625.png 547 | whucd_04559.png 548 | whucd_03716.png 549 | whucd_05517.png 550 | whucd_03053.png 551 | whucd_05865.png 552 | whucd_05371.png 553 | whucd_00189.png 554 | whucd_04178.png 555 | whucd_04344.png 556 | whucd_05373.png 557 | whucd_02739.png 558 | whucd_04722.png 559 | whucd_03995.png 560 | whucd_06528.png 561 | whucd_05232.png 562 | whucd_01010.png 563 | whucd_05687.png 564 | whucd_00712.png 565 | whucd_03342.png 566 | whucd_06375.png 567 | whucd_07024.png 568 | whucd_07286.png 569 | whucd_06626.png 570 | whucd_07254.png 571 | whucd_01740.png 572 | whucd_05863.png 573 | whucd_01468.png 574 | whucd_00303.png 575 | whucd_05753.png 576 | whucd_02713.png 577 | whucd_01446.png 578 | whucd_02745.png 579 | whucd_06918.png 580 | whucd_01294.png 581 | whucd_01975.png 582 | whucd_02726.png 583 | whucd_06914.png 584 | whucd_01229.png 585 | whucd_00598.png 586 | whucd_01470.png 587 | whucd_00745.png 588 | whucd_00947.png 589 | whucd_06774.png 590 | whucd_02103.png 591 | whucd_02218.png 592 | whucd_07163.png 593 | whucd_03278.png 594 | whucd_05769.png 595 | whucd_03992.png 596 | whucd_02315.png 597 | whucd_04028.png 598 | whucd_01059.png 599 | whucd_05057.png 600 | whucd_01160.png 601 | whucd_02725.png 602 | whucd_05940.png 603 | whucd_00218.png 604 | whucd_06306.png 605 | whucd_07129.png 606 | whucd_00362.png 607 | whucd_04229.png 608 | whucd_03913.png 609 | whucd_05578.png 610 | whucd_06441.png 611 | whucd_01411.png 612 | whucd_03727.png 613 | whucd_04737.png 614 | whucd_04187.png 615 | whucd_00991.png 616 | whucd_00693.png 617 | whucd_04433.png 618 | whucd_00772.png 619 | whucd_05112.png 620 | whucd_06539.png 621 | whucd_01563.png 622 | whucd_04872.png 623 | whucd_06149.png 624 | whucd_01331.png 625 | whucd_05313.png 626 | whucd_04595.png 627 | whucd_06915.png 628 | whucd_03756.png 629 | whucd_04699.png 630 | whucd_06017.png 631 | whucd_01166.png 632 | whucd_02758.png 633 | whucd_05004.png 634 | whucd_05895.png 635 | whucd_01410.png 636 | whucd_00547.png 637 | whucd_03052.png 638 | whucd_03726.png 639 | whucd_04005.png 640 | whucd_03852.png 641 | whucd_00567.png 642 | whucd_04467.png 643 | whucd_04875.png 644 | whucd_03494.png 645 | whucd_04357.png 646 | whucd_03724.png 647 | whucd_05500.png 648 | whucd_02091.png 649 | whucd_03049.png 650 | whucd_04000.png 651 | whucd_03529.png 652 | whucd_01185.png 653 | whucd_04222.png 654 | whucd_03962.png 655 | whucd_04597.png 656 | whucd_04697.png 657 | whucd_04621.png 658 | whucd_05368.png 659 | whucd_04741.png 660 | whucd_05719.png 661 | whucd_00308.png 662 | whucd_02682.png 663 | whucd_02827.png 664 | whucd_06962.png 665 | whucd_05694.png 666 | whucd_00434.png 667 | whucd_04423.png 668 | whucd_04114.png 669 | whucd_03744.png 670 | whucd_06892.png 671 | whucd_07144.png 672 | whucd_01723.png 673 | whucd_02192.png 674 | whucd_02986.png 675 | whucd_00421.png 676 | whucd_05819.png 677 | whucd_03865.png 678 | whucd_01292.png 679 | whucd_06898.png 680 | whucd_00423.png 681 | whucd_02346.png 682 | whucd_01415.png 683 | whucd_00485.png 684 | whucd_03491.png 685 | whucd_04944.png 686 | whucd_07071.png 687 | whucd_06791.png 688 | whucd_02286.png 689 | whucd_05427.png 690 | whucd_04219.png 691 | whucd_02605.png 692 | whucd_00370.png 693 | whucd_06273.png 694 | whucd_04978.png 695 | whucd_05228.png 696 | whucd_06023.png 697 | whucd_04120.png 698 | whucd_03715.png 699 | whucd_06369.png 700 | whucd_06896.png 701 | whucd_02587.png 702 | whucd_03673.png 703 | whucd_05116.png 704 | whucd_04179.png 705 | whucd_01334.png 706 | whucd_05315.png 707 | whucd_05257.png 708 | whucd_05444.png 709 | whucd_07356.png 710 | whucd_05890.png 711 | whucd_00435.png 712 | whucd_00486.png 713 | whucd_07274.png 714 | whucd_01597.png 715 | whucd_05121.png 716 | whucd_00356.png 717 | whucd_05574.png 718 | whucd_05867.png 719 | whucd_01379.png 720 | whucd_06541.png 721 | whucd_04202.png 722 | whucd_06663.png 723 | whucd_06251.png 724 | whucd_06772.png 725 | whucd_07174.png 726 | whucd_03157.png 727 | whucd_05416.png 728 | whucd_00221.png 729 | whucd_03075.png 730 | whucd_06124.png 731 | whucd_01090.png 732 | whucd_05195.png 733 | whucd_00746.png 734 | whucd_06150.png 735 | whucd_07072.png 736 | whucd_07018.png 737 | whucd_04067.png 738 | whucd_02866.png 739 | whucd_05470.png 740 | whucd_06068.png 741 | whucd_03977.png 742 | whucd_06384.png 743 | whucd_06190.png 744 | whucd_03956.png 745 | whucd_07387.png 746 | whucd_02423.png 747 | whucd_02859.png 748 | whucd_04232.png 749 | whucd_05370.png 750 | whucd_00318.png 751 | whucd_04725.png 752 | whucd_00944.png 753 | whucd_03747.png 754 | whucd_07138.png 755 | whucd_06377.png 756 | whucd_06503.png 757 | whucd_00570.png 758 | whucd_02310.png 759 | whucd_02447.png 760 | whucd_04217.png 761 | whucd_02136.png 762 | whucd_06595.png 763 | whucd_04852.png 764 | whucd_04566.png 765 | whucd_07038.png 766 | whucd_05304.png 767 | whucd_06117.png 768 | whucd_00334.png 769 | whucd_05113.png 770 | whucd_02272.png 771 | whucd_04862.png 772 | whucd_02262.png 773 | whucd_06555.png 774 | whucd_05768.png 775 | whucd_01467.png 776 | whucd_06623.png 777 | whucd_02123.png 778 | whucd_05699.png 779 | whucd_02927.png 780 | whucd_02993.png 781 | whucd_06426.png 782 | whucd_01172.png 783 | whucd_05247.png 784 | whucd_06708.png 785 | whucd_03883.png 786 | whucd_01286.png 787 | whucd_04743.png 788 | whucd_00181.png 789 | whucd_04302.png 790 | whucd_04131.png 791 | whucd_03371.png 792 | whucd_03162.png 793 | whucd_02479.png 794 | whucd_07013.png 795 | whucd_01474.png 796 | whucd_04101.png 797 | whucd_06450.png 798 | whucd_02118.png 799 | whucd_02949.png 800 | whucd_05364.png 801 | whucd_04867.png 802 | whucd_06529.png 803 | whucd_02675.png 804 | whucd_04721.png 805 | whucd_02603.png 806 | whucd_02353.png 807 | whucd_01014.png 808 | whucd_04095.png 809 | whucd_03407.png 810 | whucd_05944.png 811 | whucd_02548.png 812 | whucd_03531.png 813 | whucd_06270.png 814 | whucd_06837.png 815 | whucd_04358.png 816 | whucd_06778.png 817 | whucd_05236.png 818 | whucd_07035.png 819 | whucd_00618.png 820 | whucd_01993.png 821 | whucd_06698.png 822 | whucd_02090.png 823 | whucd_02735.png 824 | whucd_06018.png 825 | whucd_03066.png 826 | whucd_02858.png 827 | whucd_07087.png 828 | whucd_01896.png 829 | whucd_07042.png 830 | whucd_06521.png 831 | whucd_05498.png 832 | whucd_01158.png 833 | whucd_01123.png 834 | whucd_04477.png 835 | whucd_04498.png 836 | whucd_06508.png 837 | whucd_06274.png 838 | whucd_05814.png 839 | whucd_00440.png 840 | whucd_02731.png 841 | whucd_03619.png 842 | whucd_02431.png 843 | whucd_01214.png 844 | whucd_04098.png 845 | whucd_00202.png 846 | whucd_02729.png 847 | whucd_03842.png 848 | whucd_02795.png 849 | whucd_06253.png 850 | whucd_06498.png 851 | whucd_00724.png 852 | whucd_07044.png 853 | whucd_04931.png 854 | whucd_06809.png 855 | whucd_06899.png 856 | whucd_00708.png 857 | whucd_02855.png 858 | whucd_03874.png 859 | whucd_00866.png 860 | whucd_05309.png 861 | whucd_00585.png 862 | whucd_03854.png 863 | whucd_01171.png 864 | whucd_06442.png 865 | whucd_06373.png 866 | whucd_03240.png 867 | whucd_05998.png 868 | whucd_06537.png 869 | whucd_07178.png 870 | whucd_01202.png 871 | whucd_06897.png 872 | whucd_03470.png 873 | whucd_01596.png 874 | whucd_02429.png 875 | whucd_05991.png 876 | whucd_03596.png 877 | whucd_04623.png 878 | whucd_02653.png 879 | whucd_03315.png 880 | whucd_02011.png 881 | whucd_02977.png 882 | whucd_02184.png 883 | whucd_06901.png 884 | whucd_02104.png 885 | whucd_04992.png 886 | whucd_00822.png 887 | whucd_02992.png 888 | whucd_01416.png 889 | whucd_04102.png 890 | whucd_01437.png 891 | whucd_03339.png 892 | whucd_02856.png 893 | whucd_02189.png 894 | whucd_02120.png 895 | whucd_01319.png 896 | whucd_04616.png 897 | whucd_04352.png 898 | whucd_06304.png 899 | whucd_02873.png 900 | whucd_00883.png 901 | whucd_05765.png 902 | whucd_07334.png 903 | whucd_03242.png 904 | whucd_03718.png 905 | whucd_02165.png 906 | whucd_02217.png 907 | whucd_04368.png 908 | whucd_02305.png 909 | whucd_03600.png 910 | whucd_00770.png 911 | whucd_00481.png 912 | whucd_04350.png 913 | whucd_04594.png 914 | whucd_03662.png 915 | whucd_07278.png 916 | whucd_05871.png 917 | whucd_00827.png 918 | whucd_06275.png 919 | whucd_02688.png 920 | whucd_00538.png 921 | whucd_03323.png 922 | whucd_07166.png 923 | whucd_00155.png 924 | whucd_02733.png 925 | whucd_04113.png 926 | whucd_00823.png 927 | whucd_02460.png 928 | whucd_00454.png 929 | whucd_06368.png 930 | whucd_03215.png 931 | whucd_03408.png 932 | whucd_00188.png 933 | whucd_06414.png 934 | whucd_06430.png 935 | whucd_03503.png 936 | whucd_03367.png 937 | whucd_04751.png 938 | whucd_03745.png 939 | whucd_04077.png 940 | whucd_03196.png 941 | whucd_05693.png 942 | whucd_05621.png 943 | whucd_02863.png 944 | whucd_03721.png 945 | whucd_03343.png 946 | whucd_03722.png 947 | whucd_07200.png 948 | whucd_07325.png 949 | whucd_06119.png 950 | whucd_03126.png 951 | whucd_05246.png 952 | whucd_07416.png 953 | whucd_05439.png 954 | whucd_00699.png 955 | whucd_06448.png 956 | whucd_00445.png 957 | whucd_02183.png 958 | whucd_07007.png 959 | whucd_03366.png 960 | whucd_04227.png 961 | whucd_05320.png 962 | whucd_06343.png 963 | whucd_04353.png 964 | whucd_00499.png 965 | whucd_05598.png 966 | whucd_04203.png 967 | whucd_02602.png 968 | whucd_06518.png 969 | whucd_02127.png 970 | whucd_00853.png 971 | whucd_04351.png 972 | whucd_02982.png 973 | whucd_04976.png 974 | whucd_03459.png 975 | whucd_01426.png 976 | whucd_00548.png 977 | whucd_04877.png 978 | whucd_01473.png 979 | whucd_00709.png 980 | whucd_06078.png 981 | whucd_03847.png 982 | whucd_00444.png 983 | whucd_00410.png 984 | whucd_03786.png 985 | whucd_07160.png 986 | whucd_07023.png 987 | whucd_05291.png 988 | whucd_07327.png 989 | whucd_03781.png 990 | whucd_06371.png 991 | whucd_05131.png 992 | whucd_04940.png 993 | whucd_05360.png 994 | whucd_05363.png 995 | whucd_03587.png 996 | whucd_07070.png 997 | whucd_07139.png 998 | whucd_02798.png 999 | whucd_01828.png 1000 | whucd_05186.png 1001 | whucd_01305.png 1002 | whucd_03676.png 1003 | whucd_03243.png 1004 | whucd_04105.png 1005 | whucd_02882.png 1006 | whucd_00316.png 1007 | whucd_01037.png 1008 | whucd_02785.png 1009 | whucd_07391.png 1010 | whucd_05516.png 1011 | whucd_02044.png 1012 | whucd_03544.png 1013 | whucd_00946.png 1014 | whucd_01859.png 1015 | whucd_05106.png 1016 | whucd_07127.png 1017 | whucd_00851.png 1018 | whucd_06648.png 1019 | whucd_01739.png 1020 | whucd_03800.png 1021 | whucd_03802.png 1022 | whucd_05102.png 1023 | whucd_03891.png 1024 | whucd_06320.png 1025 | whucd_05876.png 1026 | whucd_04878.png 1027 | whucd_02245.png 1028 | whucd_04240.png 1029 | whucd_01860.png 1030 | whucd_01892.png 1031 | whucd_01089.png 1032 | whucd_00909.png 1033 | whucd_04093.png 1034 | whucd_03216.png 1035 | whucd_01189.png 1036 | whucd_02932.png 1037 | whucd_04372.png 1038 | whucd_01469.png 1039 | whucd_04237.png 1040 | whucd_02097.png 1041 | whucd_03468.png 1042 | whucd_01344.png 1043 | whucd_04499.png 1044 | whucd_01314.png 1045 | whucd_03658.png 1046 | whucd_04177.png 1047 | whucd_07291.png 1048 | whucd_04853.png 1049 | whucd_07180.png 1050 | whucd_02050.png 1051 | whucd_02297.png 1052 | whucd_03804.png 1053 | whucd_04567.png 1054 | whucd_05250.png 1055 | whucd_00639.png 1056 | whucd_03449.png 1057 | whucd_00817.png 1058 | whucd_00354.png 1059 | whucd_04017.png 1060 | whucd_06889.png 1061 | whucd_07273.png 1062 | whucd_00475.png 1063 | whucd_05816.png 1064 | whucd_06121.png 1065 | whucd_04869.png 1066 | whucd_02737.png 1067 | whucd_02555.png 1068 | whucd_02295.png 1069 | whucd_02851.png 1070 | whucd_04997.png 1071 | whucd_02169.png 1072 | whucd_02950.png 1073 | whucd_00565.png 1074 | whucd_03281.png 1075 | whucd_06628.png 1076 | whucd_04501.png 1077 | whucd_02397.png 1078 | whucd_02349.png 1079 | whucd_07026.png 1080 | whucd_03970.png 1081 | whucd_06750.png 1082 | whucd_02801.png 1083 | whucd_05735.png 1084 | whucd_03584.png 1085 | whucd_03793.png 1086 | whucd_05231.png 1087 | whucd_04692.png 1088 | whucd_04685.png 1089 | whucd_05875.png 1090 | whucd_02119.png 1091 | whucd_03599.png 1092 | whucd_04739.png 1093 | whucd_04303.png 1094 | whucd_04603.png 1095 | whucd_04740.png 1096 | whucd_01513.png 1097 | whucd_02732.png 1098 | whucd_01915.png 1099 | whucd_02862.png 1100 | whucd_00546.png 1101 | whucd_06662.png 1102 | whucd_01505.png 1103 | whucd_00338.png 1104 | whucd_06265.png 1105 | whucd_05241.png 1106 | whucd_02556.png 1107 | whucd_01341.png 1108 | whucd_06370.png 1109 | whucd_03790.png 1110 | whucd_05820.png 1111 | whucd_06242.png 1112 | whucd_04241.png 1113 | whucd_01423.png 1114 | whucd_02294.png 1115 | whucd_01005.png 1116 | whucd_00523.png 1117 | whucd_06396.png 1118 | whucd_06222.png 1119 | whucd_07037.png 1120 | whucd_05118.png 1121 | whucd_06070.png 1122 | whucd_00220.png 1123 | whucd_02166.png 1124 | whucd_01709.png 1125 | whucd_02805.png 1126 | whucd_02673.png 1127 | whucd_03841.png 1128 | whucd_03878.png 1129 | whucd_00629.png 1130 | whucd_00228.png 1131 | whucd_03239.png 1132 | whucd_00700.png 1133 | whucd_02351.png 1134 | whucd_04627.png 1135 | whucd_03495.png 1136 | whucd_02922.png 1137 | whucd_00101.png 1138 | whucd_02748.png 1139 | whucd_01063.png 1140 | whucd_04818.png 1141 | whucd_01103.png 1142 | whucd_04103.png 1143 | whucd_03911.png 1144 | whucd_06140.png 1145 | whucd_04297.png 1146 | whucd_02712.png 1147 | whucd_01977.png 1148 | whucd_06916.png 1149 | whucd_01113.png 1150 | whucd_06751.png 1151 | whucd_04610.png 1152 | whucd_03497.png 1153 | whucd_04995.png 1154 | whucd_00820.png 1155 | whucd_07292.png 1156 | whucd_02803.png 1157 | whucd_05492.png 1158 | whucd_05767.png 1159 | whucd_02728.png 1160 | whucd_02228.png 1161 | whucd_02787.png 1162 | whucd_06191.png 1163 | whucd_05342.png 1164 | whucd_06667.png 1165 | whucd_03741.png 1166 | whucd_02040.png 1167 | whucd_05256.png 1168 | whucd_02921.png 1169 | whucd_02759.png 1170 | whucd_04354.png 1171 | whucd_06398.png 1172 | whucd_06123.png 1173 | whucd_00024.png 1174 | whucd_03506.png 1175 | whucd_05230.png 1176 | whucd_05358.png 1177 | whucd_05355.png 1178 | whucd_03757.png 1179 | whucd_04238.png 1180 | whucd_03492.png 1181 | whucd_05679.png 1182 | whucd_02681.png 1183 | whucd_06433.png 1184 | whucd_02668.png 1185 | whucd_01458.png 1186 | whucd_02105.png 1187 | whucd_06381.png 1188 | whucd_04052.png 1189 | whucd_06523.png 1190 | whucd_03853.png 1191 | whucd_01336.png 1192 | whucd_03195.png 1193 | whucd_07268.png 1194 | whucd_00498.png 1195 | whucd_05374.png 1196 | whucd_00464.png 1197 | whucd_01323.png 1198 | whucd_05184.png 1199 | whucd_01866.png 1200 | whucd_04307.png 1201 | whucd_03845.png 1202 | whucd_02669.png 1203 | whucd_01571.png 1204 | whucd_06749.png 1205 | whucd_06227.png 1206 | whucd_07021.png 1207 | whucd_06683.png 1208 | whucd_03537.png 1209 | whucd_04495.png 1210 | whucd_01680.png 1211 | whucd_04720.png 1212 | whucd_05832.png 1213 | whucd_02160.png 1214 | whucd_04092.png 1215 | whucd_02445.png 1216 | whucd_06946.png 1217 | whucd_06374.png 1218 | whucd_01345.png 1219 | whucd_05132.png 1220 | whucd_04876.png 1221 | whucd_02168.png 1222 | whucd_00575.png 1223 | whucd_01168.png 1224 | whucd_04781.png 1225 | whucd_05941.png 1226 | whucd_01304.png 1227 | whucd_01346.png 1228 | whucd_01387.png 1229 | whucd_07414.png 1230 | whucd_00194.png 1231 | whucd_03967.png 1232 | whucd_05809.png 1233 | whucd_06432.png 1234 | whucd_07293.png 1235 | whucd_00599.png 1236 | whucd_00959.png 1237 | whucd_04346.png 1238 | whucd_06764.png 1239 | whucd_04242.png 1240 | whucd_02550.png 1241 | whucd_06664.png 1242 | whucd_02318.png 1243 | whucd_05229.png 1244 | whucd_06378.png 1245 | whucd_00713.png 1246 | whucd_01671.png 1247 | whucd_03919.png 1248 | whucd_05243.png 1249 | whucd_01944.png 1250 | whucd_04736.png 1251 | whucd_03629.png 1252 | whucd_04861.png 1253 | whucd_01581.png 1254 | whucd_04343.png 1255 | whucd_03280.png 1256 | whucd_05996.png 1257 | whucd_02865.png 1258 | whucd_04604.png 1259 | whucd_03595.png 1260 | whucd_00628.png 1261 | whucd_01347.png 1262 | whucd_02004.png 1263 | whucd_03586.png 1264 | whucd_03235.png 1265 | whucd_03850.png 1266 | whucd_03991.png 1267 | whucd_07195.png 1268 | whucd_00504.png 1269 | whucd_02185.png 1270 | whucd_06022.png 1271 | whucd_04986.png 1272 | whucd_05120.png 1273 | whucd_03589.png 1274 | whucd_04857.png 1275 | whucd_05114.png 1276 | whucd_04031.png 1277 | whucd_00697.png 1278 | whucd_03785.png 1279 | whucd_02988.png 1280 | whucd_07307.png 1281 | whucd_07288.png 1282 | whucd_04224.png 1283 | whucd_01508.png 1284 | whucd_02472.png 1285 | whucd_01340.png 1286 | whucd_01335.png 1287 | whucd_07415.png 1288 | whucd_07418.png 1289 | whucd_00050.png 1290 | whucd_04622.png 1291 | whucd_05490.png 1292 | whucd_05642.png 1293 | whucd_01940.png 1294 | whucd_01288.png 1295 | whucd_00461.png 1296 | whucd_02738.png 1297 | whucd_06016.png 1298 | whucd_03993.png 1299 | whucd_01285.png 1300 | whucd_02991.png 1301 | whucd_05749.png 1302 | whucd_06755.png 1303 | whucd_04239.png 1304 | whucd_05442.png 1305 | whucd_01215.png 1306 | whucd_06118.png 1307 | whucd_04860.png 1308 | whucd_06919.png 1309 | whucd_04246.png 1310 | whucd_02674.png 1311 | whucd_01511.png 1312 | whucd_00455.png 1313 | whucd_01187.png 1314 | whucd_00463.png 1315 | whucd_03678.png 1316 | whucd_03072.png 1317 | whucd_06947.png 1318 | whucd_05486.png 1319 | whucd_05864.png 1320 | whucd_00642.png 1321 | whucd_03618.png 1322 | whucd_05622.png 1323 | whucd_06902.png 1324 | whucd_04690.png 1325 | whucd_01118.png 1326 | whucd_05002.png 1327 | whucd_06592.png 1328 | whucd_00569.png 1329 | whucd_04104.png 1330 | whucd_03601.png 1331 | whucd_01141.png 1332 | whucd_00783.png 1333 | whucd_01329.png 1334 | whucd_04374.png 1335 | whucd_05678.png 1336 | whucd_07198.png 1337 | whucd_03602.png 1338 | whucd_01112.png 1339 | whucd_00436.png 1340 | whucd_01414.png 1341 | whucd_03905.png 1342 | whucd_01432.png 1343 | whucd_01297.png 1344 | whucd_02654.png 1345 | whucd_03838.png 1346 | whucd_02092.png 1347 | whucd_05987.png 1348 | whucd_04564.png 1349 | whucd_00286.png 1350 | whucd_03179.png 1351 | whucd_07054.png 1352 | whucd_03504.png 1353 | whucd_04726.png 1354 | whucd_01332.png 1355 | whucd_06088.png 1356 | whucd_01507.png 1357 | whucd_05815.png 1358 | whucd_02828.png 1359 | whucd_07320.png 1360 | whucd_05618.png 1361 | whucd_06793.png 1362 | -------------------------------------------------------------------------------- /data/WHU-CD-List/20_train_supervised.txt: -------------------------------------------------------------------------------- 1 | whucd_02547.png 2 | whucd_05868.png 3 | whucd_00871.png 4 | whucd_07001.png 5 | whucd_04331.png 6 | whucd_04228.png 7 | whucd_06246.png 8 | whucd_00630.png 9 | whucd_04199.png 10 | whucd_05344.png 11 | whucd_03872.png 12 | whucd_04598.png 13 | whucd_07147.png 14 | whucd_03283.png 15 | whucd_01681.png 16 | whucd_02180.png 17 | whucd_02309.png 18 | whucd_01313.png 19 | whucd_01438.png 20 | whucd_03711.png 21 | whucd_02864.png 22 | whucd_05053.png 23 | whucd_00738.png 24 | whucd_01140.png 25 | whucd_05244.png 26 | whucd_02540.png 27 | whucd_07290.png 28 | whucd_06515.png 29 | whucd_06065.png 30 | whucd_04359.png 31 | whucd_00076.png 32 | whucd_03974.png 33 | whucd_03150.png 34 | whucd_05995.png 35 | whucd_05258.png 36 | whucd_03620.png 37 | whucd_01722.png 38 | whucd_05123.png 39 | whucd_02224.png 40 | whucd_00142.png 41 | whucd_07335.png 42 | whucd_06469.png 43 | whucd_01092.png 44 | whucd_06397.png 45 | whucd_00361.png 46 | whucd_03534.png 47 | whucd_06881.png 48 | whucd_03846.png 49 | whucd_07276.png 50 | whucd_02978.png 51 | whucd_00574.png 52 | whucd_03907.png 53 | whucd_01850.png 54 | whucd_04125.png 55 | whucd_02663.png 56 | whucd_06647.png 57 | whucd_02711.png 58 | whucd_02480.png 59 | whucd_05992.png 60 | whucd_06413.png 61 | whucd_03505.png 62 | whucd_00443.png 63 | whucd_07392.png 64 | whucd_04111.png 65 | whucd_03340.png 66 | whucd_07039.png 67 | whucd_06895.png 68 | whucd_04094.png 69 | whucd_03466.png 70 | whucd_03867.png 71 | whucd_01065.png 72 | whucd_00694.png 73 | whucd_03042.png 74 | whucd_05891.png 75 | whucd_03103.png 76 | whucd_00187.png 77 | whucd_06271.png 78 | whucd_02566.png 79 | whucd_06682.png 80 | whucd_04553.png 81 | whucd_04126.png 82 | whucd_00818.png 83 | whucd_04479.png 84 | whucd_00948.png 85 | whucd_01101.png 86 | whucd_07326.png 87 | whucd_02800.png 88 | whucd_00784.png 89 | whucd_07322.png 90 | whucd_04233.png 91 | whucd_06380.png 92 | whucd_01088.png 93 | whucd_01911.png 94 | whucd_07040.png 95 | whucd_03986.png 96 | whucd_04130.png 97 | whucd_05489.png 98 | whucd_00064.png 99 | whucd_01300.png 100 | whucd_06266.png 101 | whucd_03801.png 102 | whucd_00838.png 103 | whucd_03725.png 104 | whucd_05310.png 105 | whucd_01945.png 106 | whucd_04170.png 107 | whucd_00564.png 108 | whucd_00524.png 109 | whucd_02138.png 110 | whucd_03104.png 111 | whucd_06500.png 112 | whucd_03979.png 113 | whucd_06504.png 114 | whucd_03279.png 115 | whucd_04868.png 116 | whucd_00437.png 117 | whucd_03851.png 118 | whucd_04112.png 119 | whucd_04117.png 120 | whucd_04369.png 121 | whucd_04493.png 122 | whucd_03927.png 123 | whucd_07146.png 124 | whucd_07412.png 125 | whucd_03043.png 126 | whucd_03153.png 127 | whucd_05861.png 128 | whucd_04121.png 129 | whucd_03748.png 130 | whucd_06903.png 131 | whucd_00867.png 132 | whucd_03114.png 133 | whucd_02524.png 134 | whucd_06333.png 135 | whucd_01093.png 136 | whucd_06556.png 137 | whucd_05107.png 138 | whucd_05441.png 139 | whucd_04863.png 140 | whucd_00840.png 141 | whucd_02985.png 142 | whucd_04329.png 143 | whucd_04727.png 144 | whucd_07167.png 145 | whucd_03425.png 146 | whucd_05620.png 147 | whucd_00109.png 148 | whucd_00392.png 149 | whucd_03238.png 150 | whucd_04724.png 151 | whucd_06467.png 152 | whucd_02903.png 153 | whucd_02251.png 154 | whucd_06225.png 155 | whucd_04473.png 156 | whucd_00198.png 157 | whucd_04096.png 158 | whucd_06788.png 159 | whucd_00483.png 160 | whucd_03792.png 161 | whucd_05110.png 162 | whucd_03791.png 163 | whucd_03117.png 164 | whucd_01864.png 165 | whucd_03155.png 166 | whucd_05365.png 167 | whucd_03837.png 168 | whucd_04927.png 169 | whucd_04226.png 170 | whucd_03964.png 171 | whucd_06316.png 172 | whucd_02230.png 173 | whucd_07402.png 174 | whucd_04980.png 175 | whucd_05217.png 176 | whucd_00949.png 177 | whucd_04859.png 178 | whucd_03976.png 179 | whucd_03843.png 180 | whucd_05874.png 181 | whucd_00235.png 182 | whucd_06020.png 183 | whucd_01298.png 184 | whucd_06394.png 185 | whucd_03180.png 186 | whucd_03316.png 187 | whucd_05218.png 188 | whucd_00231.png 189 | whucd_03289.png 190 | whucd_00482.png 191 | whucd_00571.png 192 | whucd_04993.png 193 | whucd_02987.png 194 | whucd_03593.png 195 | whucd_03866.png 196 | whucd_03713.png 197 | whucd_00190.png 198 | whucd_04738.png 199 | whucd_04983.png 200 | whucd_00141.png 201 | whucd_03424.png 202 | whucd_06189.png 203 | whucd_03720.png 204 | whucd_07149.png 205 | whucd_01450.png 206 | whucd_04485.png 207 | whucd_04723.png 208 | whucd_04851.png 209 | whucd_04618.png 210 | whucd_05433.png 211 | whucd_04206.png 212 | whucd_01217.png 213 | whucd_06307.png 214 | whucd_07142.png 215 | whucd_06890.png 216 | whucd_07152.png 217 | whucd_01079.png 218 | whucd_07321.png 219 | whucd_01165.png 220 | whucd_05723.png 221 | whucd_04310.png 222 | whucd_01506.png 223 | whucd_03284.png 224 | whucd_05305.png 225 | whucd_05553.png 226 | whucd_04091.png 227 | whucd_05248.png 228 | whucd_01324.png 229 | whucd_03999.png 230 | whucd_06272.png 231 | whucd_03530.png 232 | whucd_00227.png 233 | whucd_03579.png 234 | whucd_03493.png 235 | whucd_06875.png 236 | whucd_01164.png 237 | whucd_06912.png 238 | whucd_01912.png 239 | whucd_05128.png 240 | whucd_06910.png 241 | whucd_05234.png 242 | whucd_02354.png 243 | whucd_04691.png 244 | whucd_06254.png 245 | whucd_04216.png 246 | whucd_05133.png 247 | whucd_06666.png 248 | whucd_01861.png 249 | whucd_01431.png 250 | whucd_06268.png 251 | whucd_04849.png 252 | whucd_01127.png 253 | whucd_03112.png 254 | whucd_05001.png 255 | whucd_04619.png 256 | whucd_07394.png 257 | whucd_03475.png 258 | whucd_03657.png 259 | whucd_05122.png 260 | whucd_01421.png 261 | whucd_06045.png 262 | whucd_02018.png 263 | whucd_06576.png 264 | whucd_05362.png 265 | whucd_04611.png 266 | whucd_06400.png 267 | whucd_05494.png 268 | whucd_00429.png 269 | whucd_01509.png 270 | whucd_05314.png 271 | whucd_05831.png 272 | whucd_06148.png 273 | whucd_01380.png 274 | whucd_05369.png 275 | whucd_06922.png 276 | whucd_01204.png 277 | whucd_07137.png 278 | whucd_05237.png 279 | whucd_07269.png 280 | whucd_04994.png 281 | whucd_01798.png 282 | whucd_03712.png 283 | whucd_06021.png 284 | whucd_03677.png 285 | whucd_07017.png 286 | whucd_05000.png 287 | whucd_06376.png 288 | whucd_06315.png 289 | whucd_00422.png 290 | whucd_00412.png 291 | whucd_02036.png 292 | whucd_05623.png 293 | whucd_04614.png 294 | whucd_04045.png 295 | whucd_02872.png 296 | whucd_06243.png 297 | whucd_05239.png 298 | whucd_07041.png 299 | whucd_02983.png 300 | whucd_06382.png 301 | whucd_06770.png 302 | whucd_05367.png 303 | -------------------------------------------------------------------------------- /data/WHU-CD-List/20_train_unsupervised.txt: -------------------------------------------------------------------------------- 1 | whucd_05619.png 2 | whucd_03538.png 3 | whucd_03848.png 4 | whucd_01598.png 5 | whucd_07006.png 6 | whucd_00812.png 7 | whucd_00942.png 8 | whucd_04854.png 9 | whucd_00128.png 10 | whucd_02884.png 11 | whucd_04753.png 12 | whucd_04987.png 13 | whucd_06880.png 14 | whucd_00150.png 15 | whucd_00171.png 16 | whucd_01119.png 17 | whucd_03474.png 18 | whucd_00068.png 19 | whucd_03915.png 20 | whucd_04348.png 21 | whucd_05894.png 22 | whucd_01091.png 23 | whucd_03914.png 24 | whucd_02730.png 25 | whucd_03789.png 26 | whucd_06643.png 27 | whucd_01209.png 28 | whucd_02523.png 29 | whucd_03966.png 30 | whucd_01436.png 31 | whucd_00382.png 32 | whucd_01019.png 33 | whucd_06095.png 34 | whucd_06116.png 35 | whucd_03189.png 36 | whucd_00153.png 37 | whucd_03663.png 38 | whucd_00359.png 39 | whucd_05429.png 40 | whucd_00677.png 41 | whucd_07267.png 42 | whucd_00339.png 43 | whucd_01865.png 44 | whucd_02990.png 45 | whucd_06629.png 46 | whucd_01041.png 47 | whucd_07194.png 48 | whucd_05499.png 49 | whucd_00297.png 50 | whucd_07074.png 51 | whucd_01186.png 52 | whucd_06115.png 53 | whucd_01923.png 54 | whucd_02319.png 55 | whucd_04313.png 56 | whucd_03110.png 57 | whucd_01572.png 58 | whucd_00191.png 59 | whucd_00201.png 60 | whucd_06652.png 61 | whucd_05862.png 62 | whucd_04988.png 63 | whucd_03163.png 64 | whucd_02796.png 65 | whucd_02948.png 66 | whucd_06383.png 67 | whucd_00744.png 68 | whucd_03659.png 69 | whucd_04985.png 70 | whucd_05700.png 71 | whucd_07002.png 72 | whucd_02409.png 73 | whucd_06785.png 74 | whucd_05376.png 75 | whucd_01062.png 76 | whucd_00950.png 77 | whucd_02102.png 78 | whucd_02857.png 79 | whucd_05366.png 80 | whucd_02382.png 81 | whucd_04874.png 82 | whucd_04218.png 83 | whucd_02378.png 84 | whucd_03183.png 85 | whucd_01180.png 86 | whucd_03469.png 87 | whucd_02598.png 88 | whucd_04247.png 89 | whucd_05108.png 90 | whucd_06323.png 91 | whucd_00051.png 92 | whucd_05054.png 93 | whucd_03972.png 94 | whucd_03901.png 95 | whucd_00691.png 96 | whucd_02135.png 97 | whucd_01997.png 98 | whucd_04599.png 99 | whucd_03597.png 100 | whucd_03788.png 101 | whucd_01117.png 102 | whucd_00549.png 103 | whucd_06401.png 104 | whucd_05372.png 105 | whucd_06480.png 106 | whucd_05375.png 107 | whucd_07128.png 108 | whucd_00627.png 109 | whucd_06891.png 110 | whucd_03963.png 111 | whucd_05390.png 112 | whucd_04100.png 113 | whucd_01009.png 114 | whucd_03622.png 115 | whucd_00353.png 116 | whucd_00638.png 117 | whucd_00572.png 118 | whucd_04996.png 119 | whucd_03965.png 120 | whucd_06711.png 121 | whucd_04223.png 122 | whucd_01188.png 123 | whucd_06322.png 124 | whucd_00203.png 125 | whucd_00391.png 126 | whucd_01310.png 127 | whucd_03552.png 128 | whucd_06790.png 129 | whucd_01169.png 130 | whucd_02829.png 131 | whucd_00420.png 132 | whucd_06143.png 133 | whucd_05748.png 134 | whucd_00062.png 135 | whucd_01032.png 136 | whucd_02143.png 137 | whucd_03827.png 138 | whucd_05036.png 139 | whucd_03115.png 140 | whucd_01039.png 141 | whucd_07159.png 142 | whucd_03074.png 143 | whucd_06538.png 144 | whucd_00347.png 145 | whucd_04602.png 146 | whucd_05706.png 147 | whucd_00517.png 148 | whucd_07161.png 149 | whucd_00502.png 150 | whucd_04612.png 151 | whucd_03403.png 152 | whucd_00393.png 153 | whucd_05580.png 154 | whucd_06717.png 155 | whucd_07305.png 156 | whucd_05242.png 157 | whucd_03406.png 158 | whucd_06451.png 159 | whucd_05897.png 160 | whucd_06876.png 161 | whucd_00163.png 162 | whucd_07213.png 163 | whucd_05111.png 164 | whucd_02383.png 165 | whucd_03955.png 166 | whucd_01218.png 167 | whucd_03632.png 168 | whucd_01034.png 169 | whucd_04468.png 170 | whucd_06372.png 171 | whucd_06718.png 172 | whucd_04234.png 173 | whucd_00924.png 174 | whucd_07022.png 175 | whucd_03844.png 176 | whucd_00879.png 177 | whucd_07287.png 178 | whucd_05115.png 179 | whucd_01178.png 180 | whucd_05468.png 181 | whucd_03621.png 182 | whucd_02012.png 183 | whucd_07143.png 184 | whucd_07196.png 185 | whucd_04115.png 186 | whucd_06917.png 187 | whucd_04053.png 188 | whucd_05496.png 189 | whucd_00085.png 190 | whucd_05443.png 191 | whucd_01849.png 192 | whucd_06501.png 193 | whucd_07162.png 194 | whucd_01867.png 195 | whucd_07389.png 196 | whucd_03728.png 197 | whucd_04243.png 198 | whucd_03154.png 199 | whucd_04439.png 200 | whucd_03660.png 201 | whucd_05990.png 202 | whucd_03588.png 203 | whucd_06319.png 204 | whucd_04975.png 205 | whucd_00566.png 206 | whucd_01290.png 207 | whucd_02071.png 208 | whucd_00910.png 209 | whucd_05698.png 210 | whucd_04347.png 211 | whucd_03994.png 212 | whucd_03067.png 213 | whucd_04998.png 214 | whucd_01060.png 215 | whucd_06341.png 216 | whucd_04484.png 217 | whucd_07043.png 218 | whucd_04728.png 219 | whucd_00887.png 220 | whucd_00110.png 221 | whucd_03124.png 222 | whucd_07199.png 223 | whucd_04593.png 224 | whucd_01430.png 225 | whucd_06502.png 226 | whucd_03839.png 227 | whucd_05485.png 228 | whucd_01343.png 229 | whucd_01976.png 230 | whucd_00657.png 231 | whucd_00587.png 232 | whucd_00965.png 233 | whucd_05124.png 234 | whucd_04865.png 235 | whucd_04328.png 236 | whucd_07003.png 237 | whucd_02586.png 238 | whucd_03710.png 239 | whucd_07036.png 240 | whucd_01219.png 241 | whucd_06559.png 242 | whucd_04248.png 243 | whucd_06787.png 244 | whucd_06591.png 245 | whucd_01159.png 246 | whucd_01547.png 247 | whucd_07069.png 248 | whucd_06824.png 249 | whucd_04839.png 250 | whucd_04565.png 251 | whucd_03585.png 252 | whucd_00751.png 253 | whucd_01220.png 254 | whucd_02837.png 255 | whucd_02379.png 256 | whucd_03116.png 257 | whucd_00195.png 258 | whucd_02320.png 259 | whucd_03803.png 260 | whucd_06792.png 261 | whucd_00743.png 262 | whucd_05445.png 263 | whucd_00346.png 264 | whucd_01672.png 265 | whucd_05860.png 266 | whucd_04039.png 267 | whucd_06882.png 268 | whucd_05166.png 269 | whucd_05686.png 270 | whucd_06911.png 271 | whucd_05660.png 272 | whucd_01190.png 273 | whucd_04991.png 274 | whucd_05869.png 275 | whucd_00335.png 276 | whucd_05235.png 277 | whucd_01296.png 278 | whucd_04494.png 279 | whucd_01383.png 280 | whucd_04221.png 281 | whucd_03960.png 282 | whucd_02883.png 283 | whucd_02861.png 284 | whucd_04474.png 285 | whucd_07275.png 286 | whucd_02350.png 287 | whucd_04342.png 288 | whucd_00539.png 289 | whucd_04220.png 290 | whucd_03666.png 291 | whucd_04999.png 292 | whucd_04951.png 293 | whucd_07075.png 294 | whucd_05738.png 295 | whucd_04106.png 296 | whucd_01293.png 297 | whucd_05109.png 298 | whucd_07048.png 299 | whucd_07289.png 300 | whucd_02677.png 301 | whucd_05259.png 302 | whucd_05487.png 303 | whucd_00022.png 304 | whucd_03551.png 305 | whucd_01924.png 306 | whucd_05689.png 307 | whucd_04027.png 308 | whucd_04989.png 309 | whucd_07417.png 310 | whucd_01471.png 311 | whucd_04811.png 312 | whucd_02852.png 313 | whucd_00332.png 314 | whucd_00413.png 315 | whucd_06403.png 316 | whucd_02881.png 317 | whucd_03902.png 318 | whucd_06867.png 319 | whucd_07265.png 320 | whucd_03918.png 321 | whucd_06789.png 322 | whucd_04626.png 323 | whucd_03041.png 324 | whucd_04977.png 325 | whucd_07140.png 326 | whucd_00771.png 327 | whucd_02137.png 328 | whucd_03460.png 329 | whucd_07386.png 330 | whucd_04847.png 331 | whucd_07390.png 332 | whucd_05704.png 333 | whucd_03111.png 334 | whucd_03314.png 335 | whucd_06624.png 336 | whucd_03740.png 337 | whucd_06255.png 338 | whucd_06913.png 339 | whucd_03674.png 340 | whucd_00256.png 341 | whucd_07148.png 342 | whucd_02441.png 343 | whucd_04873.png 344 | whucd_04966.png 345 | whucd_03912.png 346 | whucd_04245.png 347 | whucd_02923.png 348 | whucd_00309.png 349 | whucd_02665.png 350 | whucd_04469.png 351 | whucd_03476.png 352 | whucd_02128.png 353 | whucd_02989.png 354 | whucd_02038.png 355 | whucd_02984.png 356 | whucd_06499.png 357 | whucd_05066.png 358 | whucd_04850.png 359 | whucd_04476.png 360 | whucd_01038.png 361 | whucd_04309.png 362 | whucd_07255.png 363 | whucd_00503.png 364 | whucd_04840.png 365 | whucd_03926.png 366 | whucd_03113.png 367 | whucd_06665.png 368 | whucd_02567.png 369 | whucd_05240.png 370 | whucd_06655.png 371 | whucd_04752.png 372 | whucd_01306.png 373 | whucd_01562.png 374 | whucd_07393.png 375 | whucd_02396.png 376 | whucd_05491.png 377 | whucd_00819.png 378 | whucd_05130.png 379 | whucd_03990.png 380 | whucd_02622.png 381 | whucd_02106.png 382 | whucd_00586.png 383 | whucd_00029.png 384 | whucd_03706.png 385 | whucd_06784.png 386 | whucd_01295.png 387 | whucd_04735.png 388 | whucd_03723.png 389 | whucd_04487.png 390 | whucd_07266.png 391 | whucd_00333.png 392 | whucd_03969.png 393 | whucd_04001.png 394 | whucd_01181.png 395 | whucd_04625.png 396 | whucd_04559.png 397 | whucd_03716.png 398 | whucd_05517.png 399 | whucd_03053.png 400 | whucd_05865.png 401 | whucd_05371.png 402 | whucd_00189.png 403 | whucd_04178.png 404 | whucd_04344.png 405 | whucd_05373.png 406 | whucd_02739.png 407 | whucd_04722.png 408 | whucd_03995.png 409 | whucd_06528.png 410 | whucd_05232.png 411 | whucd_01010.png 412 | whucd_05687.png 413 | whucd_00712.png 414 | whucd_03342.png 415 | whucd_06375.png 416 | whucd_07024.png 417 | whucd_07286.png 418 | whucd_06626.png 419 | whucd_07254.png 420 | whucd_01740.png 421 | whucd_05863.png 422 | whucd_01468.png 423 | whucd_00303.png 424 | whucd_05753.png 425 | whucd_02713.png 426 | whucd_01446.png 427 | whucd_02745.png 428 | whucd_06918.png 429 | whucd_01294.png 430 | whucd_01975.png 431 | whucd_02726.png 432 | whucd_06914.png 433 | whucd_01229.png 434 | whucd_00598.png 435 | whucd_01470.png 436 | whucd_00745.png 437 | whucd_00947.png 438 | whucd_06774.png 439 | whucd_02103.png 440 | whucd_02218.png 441 | whucd_07163.png 442 | whucd_03278.png 443 | whucd_05769.png 444 | whucd_03992.png 445 | whucd_02315.png 446 | whucd_04028.png 447 | whucd_01059.png 448 | whucd_05057.png 449 | whucd_01160.png 450 | whucd_02725.png 451 | whucd_05940.png 452 | whucd_00218.png 453 | whucd_06306.png 454 | whucd_07129.png 455 | whucd_00362.png 456 | whucd_04229.png 457 | whucd_03913.png 458 | whucd_05578.png 459 | whucd_06441.png 460 | whucd_01411.png 461 | whucd_03727.png 462 | whucd_04737.png 463 | whucd_04187.png 464 | whucd_00991.png 465 | whucd_00693.png 466 | whucd_04433.png 467 | whucd_00772.png 468 | whucd_05112.png 469 | whucd_06539.png 470 | whucd_01563.png 471 | whucd_04872.png 472 | whucd_06149.png 473 | whucd_01331.png 474 | whucd_05313.png 475 | whucd_04595.png 476 | whucd_06915.png 477 | whucd_03756.png 478 | whucd_04699.png 479 | whucd_06017.png 480 | whucd_01166.png 481 | whucd_02758.png 482 | whucd_05004.png 483 | whucd_05895.png 484 | whucd_01410.png 485 | whucd_00547.png 486 | whucd_03052.png 487 | whucd_03726.png 488 | whucd_04005.png 489 | whucd_03852.png 490 | whucd_00567.png 491 | whucd_04467.png 492 | whucd_04875.png 493 | whucd_03494.png 494 | whucd_04357.png 495 | whucd_03724.png 496 | whucd_05500.png 497 | whucd_02091.png 498 | whucd_03049.png 499 | whucd_04000.png 500 | whucd_03529.png 501 | whucd_01185.png 502 | whucd_04222.png 503 | whucd_03962.png 504 | whucd_04597.png 505 | whucd_04697.png 506 | whucd_04621.png 507 | whucd_05368.png 508 | whucd_04741.png 509 | whucd_05719.png 510 | whucd_00308.png 511 | whucd_02682.png 512 | whucd_02827.png 513 | whucd_06962.png 514 | whucd_05694.png 515 | whucd_00434.png 516 | whucd_04423.png 517 | whucd_04114.png 518 | whucd_03744.png 519 | whucd_06892.png 520 | whucd_07144.png 521 | whucd_01723.png 522 | whucd_02192.png 523 | whucd_02986.png 524 | whucd_00421.png 525 | whucd_05819.png 526 | whucd_03865.png 527 | whucd_01292.png 528 | whucd_06898.png 529 | whucd_00423.png 530 | whucd_02346.png 531 | whucd_01415.png 532 | whucd_00485.png 533 | whucd_03491.png 534 | whucd_04944.png 535 | whucd_07071.png 536 | whucd_06791.png 537 | whucd_02286.png 538 | whucd_05427.png 539 | whucd_04219.png 540 | whucd_02605.png 541 | whucd_00370.png 542 | whucd_06273.png 543 | whucd_04978.png 544 | whucd_05228.png 545 | whucd_06023.png 546 | whucd_04120.png 547 | whucd_03715.png 548 | whucd_06369.png 549 | whucd_06896.png 550 | whucd_02587.png 551 | whucd_03673.png 552 | whucd_05116.png 553 | whucd_04179.png 554 | whucd_01334.png 555 | whucd_05315.png 556 | whucd_05257.png 557 | whucd_05444.png 558 | whucd_07356.png 559 | whucd_05890.png 560 | whucd_00435.png 561 | whucd_00486.png 562 | whucd_07274.png 563 | whucd_01597.png 564 | whucd_05121.png 565 | whucd_00356.png 566 | whucd_05574.png 567 | whucd_05867.png 568 | whucd_01379.png 569 | whucd_06541.png 570 | whucd_04202.png 571 | whucd_06663.png 572 | whucd_06251.png 573 | whucd_06772.png 574 | whucd_07174.png 575 | whucd_03157.png 576 | whucd_05416.png 577 | whucd_00221.png 578 | whucd_03075.png 579 | whucd_06124.png 580 | whucd_01090.png 581 | whucd_05195.png 582 | whucd_00746.png 583 | whucd_06150.png 584 | whucd_07072.png 585 | whucd_07018.png 586 | whucd_04067.png 587 | whucd_02866.png 588 | whucd_05470.png 589 | whucd_06068.png 590 | whucd_03977.png 591 | whucd_06384.png 592 | whucd_06190.png 593 | whucd_03956.png 594 | whucd_07387.png 595 | whucd_02423.png 596 | whucd_02859.png 597 | whucd_04232.png 598 | whucd_05370.png 599 | whucd_00318.png 600 | whucd_04725.png 601 | whucd_00944.png 602 | whucd_03747.png 603 | whucd_07138.png 604 | whucd_06377.png 605 | whucd_06503.png 606 | whucd_00570.png 607 | whucd_02310.png 608 | whucd_02447.png 609 | whucd_04217.png 610 | whucd_02136.png 611 | whucd_06595.png 612 | whucd_04852.png 613 | whucd_04566.png 614 | whucd_07038.png 615 | whucd_05304.png 616 | whucd_06117.png 617 | whucd_00334.png 618 | whucd_05113.png 619 | whucd_02272.png 620 | whucd_04862.png 621 | whucd_02262.png 622 | whucd_06555.png 623 | whucd_05768.png 624 | whucd_01467.png 625 | whucd_06623.png 626 | whucd_02123.png 627 | whucd_05699.png 628 | whucd_02927.png 629 | whucd_02993.png 630 | whucd_06426.png 631 | whucd_01172.png 632 | whucd_05247.png 633 | whucd_06708.png 634 | whucd_03883.png 635 | whucd_01286.png 636 | whucd_04743.png 637 | whucd_00181.png 638 | whucd_04302.png 639 | whucd_04131.png 640 | whucd_03371.png 641 | whucd_03162.png 642 | whucd_02479.png 643 | whucd_07013.png 644 | whucd_01474.png 645 | whucd_04101.png 646 | whucd_06450.png 647 | whucd_02118.png 648 | whucd_02949.png 649 | whucd_05364.png 650 | whucd_04867.png 651 | whucd_06529.png 652 | whucd_02675.png 653 | whucd_04721.png 654 | whucd_02603.png 655 | whucd_02353.png 656 | whucd_01014.png 657 | whucd_04095.png 658 | whucd_03407.png 659 | whucd_05944.png 660 | whucd_02548.png 661 | whucd_03531.png 662 | whucd_06270.png 663 | whucd_06837.png 664 | whucd_04358.png 665 | whucd_06778.png 666 | whucd_05236.png 667 | whucd_07035.png 668 | whucd_00618.png 669 | whucd_01993.png 670 | whucd_06698.png 671 | whucd_02090.png 672 | whucd_02735.png 673 | whucd_06018.png 674 | whucd_03066.png 675 | whucd_02858.png 676 | whucd_07087.png 677 | whucd_01896.png 678 | whucd_07042.png 679 | whucd_06521.png 680 | whucd_05498.png 681 | whucd_01158.png 682 | whucd_01123.png 683 | whucd_04477.png 684 | whucd_04498.png 685 | whucd_06508.png 686 | whucd_06274.png 687 | whucd_05814.png 688 | whucd_00440.png 689 | whucd_02731.png 690 | whucd_03619.png 691 | whucd_02431.png 692 | whucd_01214.png 693 | whucd_04098.png 694 | whucd_00202.png 695 | whucd_02729.png 696 | whucd_03842.png 697 | whucd_02795.png 698 | whucd_06253.png 699 | whucd_06498.png 700 | whucd_00724.png 701 | whucd_07044.png 702 | whucd_04931.png 703 | whucd_06809.png 704 | whucd_06899.png 705 | whucd_00708.png 706 | whucd_02855.png 707 | whucd_03874.png 708 | whucd_00866.png 709 | whucd_05309.png 710 | whucd_00585.png 711 | whucd_03854.png 712 | whucd_01171.png 713 | whucd_06442.png 714 | whucd_06373.png 715 | whucd_03240.png 716 | whucd_05998.png 717 | whucd_06537.png 718 | whucd_07178.png 719 | whucd_01202.png 720 | whucd_06897.png 721 | whucd_03470.png 722 | whucd_01596.png 723 | whucd_02429.png 724 | whucd_05991.png 725 | whucd_03596.png 726 | whucd_04623.png 727 | whucd_02653.png 728 | whucd_03315.png 729 | whucd_02011.png 730 | whucd_02977.png 731 | whucd_02184.png 732 | whucd_06901.png 733 | whucd_02104.png 734 | whucd_04992.png 735 | whucd_00822.png 736 | whucd_02992.png 737 | whucd_01416.png 738 | whucd_04102.png 739 | whucd_01437.png 740 | whucd_03339.png 741 | whucd_02856.png 742 | whucd_02189.png 743 | whucd_02120.png 744 | whucd_01319.png 745 | whucd_04616.png 746 | whucd_04352.png 747 | whucd_06304.png 748 | whucd_02873.png 749 | whucd_00883.png 750 | whucd_05765.png 751 | whucd_07334.png 752 | whucd_03242.png 753 | whucd_03718.png 754 | whucd_02165.png 755 | whucd_02217.png 756 | whucd_04368.png 757 | whucd_02305.png 758 | whucd_03600.png 759 | whucd_00770.png 760 | whucd_00481.png 761 | whucd_04350.png 762 | whucd_04594.png 763 | whucd_03662.png 764 | whucd_07278.png 765 | whucd_05871.png 766 | whucd_00827.png 767 | whucd_06275.png 768 | whucd_02688.png 769 | whucd_00538.png 770 | whucd_03323.png 771 | whucd_07166.png 772 | whucd_00155.png 773 | whucd_02733.png 774 | whucd_04113.png 775 | whucd_00823.png 776 | whucd_02460.png 777 | whucd_00454.png 778 | whucd_06368.png 779 | whucd_03215.png 780 | whucd_03408.png 781 | whucd_00188.png 782 | whucd_06414.png 783 | whucd_06430.png 784 | whucd_03503.png 785 | whucd_03367.png 786 | whucd_04751.png 787 | whucd_03745.png 788 | whucd_04077.png 789 | whucd_03196.png 790 | whucd_05693.png 791 | whucd_05621.png 792 | whucd_02863.png 793 | whucd_03721.png 794 | whucd_03343.png 795 | whucd_03722.png 796 | whucd_07200.png 797 | whucd_07325.png 798 | whucd_06119.png 799 | whucd_03126.png 800 | whucd_05246.png 801 | whucd_07416.png 802 | whucd_05439.png 803 | whucd_00699.png 804 | whucd_06448.png 805 | whucd_00445.png 806 | whucd_02183.png 807 | whucd_07007.png 808 | whucd_03366.png 809 | whucd_04227.png 810 | whucd_05320.png 811 | whucd_06343.png 812 | whucd_04353.png 813 | whucd_00499.png 814 | whucd_05598.png 815 | whucd_04203.png 816 | whucd_02602.png 817 | whucd_06518.png 818 | whucd_02127.png 819 | whucd_00853.png 820 | whucd_04351.png 821 | whucd_02982.png 822 | whucd_04976.png 823 | whucd_03459.png 824 | whucd_01426.png 825 | whucd_00548.png 826 | whucd_04877.png 827 | whucd_01473.png 828 | whucd_00709.png 829 | whucd_06078.png 830 | whucd_03847.png 831 | whucd_00444.png 832 | whucd_00410.png 833 | whucd_03786.png 834 | whucd_07160.png 835 | whucd_07023.png 836 | whucd_05291.png 837 | whucd_07327.png 838 | whucd_03781.png 839 | whucd_06371.png 840 | whucd_05131.png 841 | whucd_04940.png 842 | whucd_05360.png 843 | whucd_05363.png 844 | whucd_03587.png 845 | whucd_07070.png 846 | whucd_07139.png 847 | whucd_02798.png 848 | whucd_01828.png 849 | whucd_05186.png 850 | whucd_01305.png 851 | whucd_03676.png 852 | whucd_03243.png 853 | whucd_04105.png 854 | whucd_02882.png 855 | whucd_00316.png 856 | whucd_01037.png 857 | whucd_02785.png 858 | whucd_07391.png 859 | whucd_05516.png 860 | whucd_02044.png 861 | whucd_03544.png 862 | whucd_00946.png 863 | whucd_01859.png 864 | whucd_05106.png 865 | whucd_07127.png 866 | whucd_00851.png 867 | whucd_06648.png 868 | whucd_01739.png 869 | whucd_03800.png 870 | whucd_03802.png 871 | whucd_05102.png 872 | whucd_03891.png 873 | whucd_06320.png 874 | whucd_05876.png 875 | whucd_04878.png 876 | whucd_02245.png 877 | whucd_04240.png 878 | whucd_01860.png 879 | whucd_01892.png 880 | whucd_01089.png 881 | whucd_00909.png 882 | whucd_04093.png 883 | whucd_03216.png 884 | whucd_01189.png 885 | whucd_02932.png 886 | whucd_04372.png 887 | whucd_01469.png 888 | whucd_04237.png 889 | whucd_02097.png 890 | whucd_03468.png 891 | whucd_01344.png 892 | whucd_04499.png 893 | whucd_01314.png 894 | whucd_03658.png 895 | whucd_04177.png 896 | whucd_07291.png 897 | whucd_04853.png 898 | whucd_07180.png 899 | whucd_02050.png 900 | whucd_02297.png 901 | whucd_03804.png 902 | whucd_04567.png 903 | whucd_05250.png 904 | whucd_00639.png 905 | whucd_03449.png 906 | whucd_00817.png 907 | whucd_00354.png 908 | whucd_04017.png 909 | whucd_06889.png 910 | whucd_07273.png 911 | whucd_00475.png 912 | whucd_05816.png 913 | whucd_06121.png 914 | whucd_04869.png 915 | whucd_02737.png 916 | whucd_02555.png 917 | whucd_02295.png 918 | whucd_02851.png 919 | whucd_04997.png 920 | whucd_02169.png 921 | whucd_02950.png 922 | whucd_00565.png 923 | whucd_03281.png 924 | whucd_06628.png 925 | whucd_04501.png 926 | whucd_02397.png 927 | whucd_02349.png 928 | whucd_07026.png 929 | whucd_03970.png 930 | whucd_06750.png 931 | whucd_02801.png 932 | whucd_05735.png 933 | whucd_03584.png 934 | whucd_03793.png 935 | whucd_05231.png 936 | whucd_04692.png 937 | whucd_04685.png 938 | whucd_05875.png 939 | whucd_02119.png 940 | whucd_03599.png 941 | whucd_04739.png 942 | whucd_04303.png 943 | whucd_04603.png 944 | whucd_04740.png 945 | whucd_01513.png 946 | whucd_02732.png 947 | whucd_01915.png 948 | whucd_02862.png 949 | whucd_00546.png 950 | whucd_06662.png 951 | whucd_01505.png 952 | whucd_00338.png 953 | whucd_06265.png 954 | whucd_05241.png 955 | whucd_02556.png 956 | whucd_01341.png 957 | whucd_06370.png 958 | whucd_03790.png 959 | whucd_05820.png 960 | whucd_06242.png 961 | whucd_04241.png 962 | whucd_01423.png 963 | whucd_02294.png 964 | whucd_01005.png 965 | whucd_00523.png 966 | whucd_06396.png 967 | whucd_06222.png 968 | whucd_07037.png 969 | whucd_05118.png 970 | whucd_06070.png 971 | whucd_00220.png 972 | whucd_02166.png 973 | whucd_01709.png 974 | whucd_02805.png 975 | whucd_02673.png 976 | whucd_03841.png 977 | whucd_03878.png 978 | whucd_00629.png 979 | whucd_00228.png 980 | whucd_03239.png 981 | whucd_00700.png 982 | whucd_02351.png 983 | whucd_04627.png 984 | whucd_03495.png 985 | whucd_02922.png 986 | whucd_00101.png 987 | whucd_02748.png 988 | whucd_01063.png 989 | whucd_04818.png 990 | whucd_01103.png 991 | whucd_04103.png 992 | whucd_03911.png 993 | whucd_06140.png 994 | whucd_04297.png 995 | whucd_02712.png 996 | whucd_01977.png 997 | whucd_06916.png 998 | whucd_01113.png 999 | whucd_06751.png 1000 | whucd_04610.png 1001 | whucd_03497.png 1002 | whucd_04995.png 1003 | whucd_00820.png 1004 | whucd_07292.png 1005 | whucd_02803.png 1006 | whucd_05492.png 1007 | whucd_05767.png 1008 | whucd_02728.png 1009 | whucd_02228.png 1010 | whucd_02787.png 1011 | whucd_06191.png 1012 | whucd_05342.png 1013 | whucd_06667.png 1014 | whucd_03741.png 1015 | whucd_02040.png 1016 | whucd_05256.png 1017 | whucd_02921.png 1018 | whucd_02759.png 1019 | whucd_04354.png 1020 | whucd_06398.png 1021 | whucd_06123.png 1022 | whucd_00024.png 1023 | whucd_03506.png 1024 | whucd_05230.png 1025 | whucd_05358.png 1026 | whucd_05355.png 1027 | whucd_03757.png 1028 | whucd_04238.png 1029 | whucd_03492.png 1030 | whucd_05679.png 1031 | whucd_02681.png 1032 | whucd_06433.png 1033 | whucd_02668.png 1034 | whucd_01458.png 1035 | whucd_02105.png 1036 | whucd_06381.png 1037 | whucd_04052.png 1038 | whucd_06523.png 1039 | whucd_03853.png 1040 | whucd_01336.png 1041 | whucd_03195.png 1042 | whucd_07268.png 1043 | whucd_00498.png 1044 | whucd_05374.png 1045 | whucd_00464.png 1046 | whucd_01323.png 1047 | whucd_05184.png 1048 | whucd_01866.png 1049 | whucd_04307.png 1050 | whucd_03845.png 1051 | whucd_02669.png 1052 | whucd_01571.png 1053 | whucd_06749.png 1054 | whucd_06227.png 1055 | whucd_07021.png 1056 | whucd_06683.png 1057 | whucd_03537.png 1058 | whucd_04495.png 1059 | whucd_01680.png 1060 | whucd_04720.png 1061 | whucd_05832.png 1062 | whucd_02160.png 1063 | whucd_04092.png 1064 | whucd_02445.png 1065 | whucd_06946.png 1066 | whucd_06374.png 1067 | whucd_01345.png 1068 | whucd_05132.png 1069 | whucd_04876.png 1070 | whucd_02168.png 1071 | whucd_00575.png 1072 | whucd_01168.png 1073 | whucd_04781.png 1074 | whucd_05941.png 1075 | whucd_01304.png 1076 | whucd_01346.png 1077 | whucd_01387.png 1078 | whucd_07414.png 1079 | whucd_00194.png 1080 | whucd_03967.png 1081 | whucd_05809.png 1082 | whucd_06432.png 1083 | whucd_07293.png 1084 | whucd_00599.png 1085 | whucd_00959.png 1086 | whucd_04346.png 1087 | whucd_06764.png 1088 | whucd_04242.png 1089 | whucd_02550.png 1090 | whucd_06664.png 1091 | whucd_02318.png 1092 | whucd_05229.png 1093 | whucd_06378.png 1094 | whucd_00713.png 1095 | whucd_01671.png 1096 | whucd_03919.png 1097 | whucd_05243.png 1098 | whucd_01944.png 1099 | whucd_04736.png 1100 | whucd_03629.png 1101 | whucd_04861.png 1102 | whucd_01581.png 1103 | whucd_04343.png 1104 | whucd_03280.png 1105 | whucd_05996.png 1106 | whucd_02865.png 1107 | whucd_04604.png 1108 | whucd_03595.png 1109 | whucd_00628.png 1110 | whucd_01347.png 1111 | whucd_02004.png 1112 | whucd_03586.png 1113 | whucd_03235.png 1114 | whucd_03850.png 1115 | whucd_03991.png 1116 | whucd_07195.png 1117 | whucd_00504.png 1118 | whucd_02185.png 1119 | whucd_06022.png 1120 | whucd_04986.png 1121 | whucd_05120.png 1122 | whucd_03589.png 1123 | whucd_04857.png 1124 | whucd_05114.png 1125 | whucd_04031.png 1126 | whucd_00697.png 1127 | whucd_03785.png 1128 | whucd_02988.png 1129 | whucd_07307.png 1130 | whucd_07288.png 1131 | whucd_04224.png 1132 | whucd_01508.png 1133 | whucd_02472.png 1134 | whucd_01340.png 1135 | whucd_01335.png 1136 | whucd_07415.png 1137 | whucd_07418.png 1138 | whucd_00050.png 1139 | whucd_04622.png 1140 | whucd_05490.png 1141 | whucd_05642.png 1142 | whucd_01940.png 1143 | whucd_01288.png 1144 | whucd_00461.png 1145 | whucd_02738.png 1146 | whucd_06016.png 1147 | whucd_03993.png 1148 | whucd_01285.png 1149 | whucd_02991.png 1150 | whucd_05749.png 1151 | whucd_06755.png 1152 | whucd_04239.png 1153 | whucd_05442.png 1154 | whucd_01215.png 1155 | whucd_06118.png 1156 | whucd_04860.png 1157 | whucd_06919.png 1158 | whucd_04246.png 1159 | whucd_02674.png 1160 | whucd_01511.png 1161 | whucd_00455.png 1162 | whucd_01187.png 1163 | whucd_00463.png 1164 | whucd_03678.png 1165 | whucd_03072.png 1166 | whucd_06947.png 1167 | whucd_05486.png 1168 | whucd_05864.png 1169 | whucd_00642.png 1170 | whucd_03618.png 1171 | whucd_05622.png 1172 | whucd_06902.png 1173 | whucd_04690.png 1174 | whucd_01118.png 1175 | whucd_05002.png 1176 | whucd_06592.png 1177 | whucd_00569.png 1178 | whucd_04104.png 1179 | whucd_03601.png 1180 | whucd_01141.png 1181 | whucd_00783.png 1182 | whucd_01329.png 1183 | whucd_04374.png 1184 | whucd_05678.png 1185 | whucd_07198.png 1186 | whucd_03602.png 1187 | whucd_01112.png 1188 | whucd_00436.png 1189 | whucd_01414.png 1190 | whucd_03905.png 1191 | whucd_01432.png 1192 | whucd_01297.png 1193 | whucd_02654.png 1194 | whucd_03838.png 1195 | whucd_02092.png 1196 | whucd_05987.png 1197 | whucd_04564.png 1198 | whucd_00286.png 1199 | whucd_03179.png 1200 | whucd_07054.png 1201 | whucd_03504.png 1202 | whucd_04726.png 1203 | whucd_01332.png 1204 | whucd_06088.png 1205 | whucd_01507.png 1206 | whucd_05815.png 1207 | whucd_02828.png 1208 | whucd_07320.png 1209 | whucd_05618.png 1210 | whucd_06793.png 1211 | -------------------------------------------------------------------------------- /data/WHU-CD-List/40_train_supervised.txt: -------------------------------------------------------------------------------- 1 | whucd_02547.png 2 | whucd_05868.png 3 | whucd_00871.png 4 | whucd_07001.png 5 | whucd_04331.png 6 | whucd_04228.png 7 | whucd_06246.png 8 | whucd_00630.png 9 | whucd_04199.png 10 | whucd_05344.png 11 | whucd_03872.png 12 | whucd_04598.png 13 | whucd_07147.png 14 | whucd_03283.png 15 | whucd_01681.png 16 | whucd_02180.png 17 | whucd_02309.png 18 | whucd_01313.png 19 | whucd_01438.png 20 | whucd_03711.png 21 | whucd_02864.png 22 | whucd_05053.png 23 | whucd_00738.png 24 | whucd_01140.png 25 | whucd_05244.png 26 | whucd_02540.png 27 | whucd_07290.png 28 | whucd_06515.png 29 | whucd_06065.png 30 | whucd_04359.png 31 | whucd_00076.png 32 | whucd_03974.png 33 | whucd_03150.png 34 | whucd_05995.png 35 | whucd_05258.png 36 | whucd_03620.png 37 | whucd_01722.png 38 | whucd_05123.png 39 | whucd_02224.png 40 | whucd_00142.png 41 | whucd_07335.png 42 | whucd_06469.png 43 | whucd_01092.png 44 | whucd_06397.png 45 | whucd_00361.png 46 | whucd_03534.png 47 | whucd_06881.png 48 | whucd_03846.png 49 | whucd_07276.png 50 | whucd_02978.png 51 | whucd_00574.png 52 | whucd_03907.png 53 | whucd_01850.png 54 | whucd_04125.png 55 | whucd_02663.png 56 | whucd_06647.png 57 | whucd_02711.png 58 | whucd_02480.png 59 | whucd_05992.png 60 | whucd_06413.png 61 | whucd_03505.png 62 | whucd_00443.png 63 | whucd_07392.png 64 | whucd_04111.png 65 | whucd_03340.png 66 | whucd_07039.png 67 | whucd_06895.png 68 | whucd_04094.png 69 | whucd_03466.png 70 | whucd_03867.png 71 | whucd_01065.png 72 | whucd_00694.png 73 | whucd_03042.png 74 | whucd_05891.png 75 | whucd_03103.png 76 | whucd_00187.png 77 | whucd_06271.png 78 | whucd_02566.png 79 | whucd_06682.png 80 | whucd_04553.png 81 | whucd_04126.png 82 | whucd_00818.png 83 | whucd_04479.png 84 | whucd_00948.png 85 | whucd_01101.png 86 | whucd_07326.png 87 | whucd_02800.png 88 | whucd_00784.png 89 | whucd_07322.png 90 | whucd_04233.png 91 | whucd_06380.png 92 | whucd_01088.png 93 | whucd_01911.png 94 | whucd_07040.png 95 | whucd_03986.png 96 | whucd_04130.png 97 | whucd_05489.png 98 | whucd_00064.png 99 | whucd_01300.png 100 | whucd_06266.png 101 | whucd_03801.png 102 | whucd_00838.png 103 | whucd_03725.png 104 | whucd_05310.png 105 | whucd_01945.png 106 | whucd_04170.png 107 | whucd_00564.png 108 | whucd_00524.png 109 | whucd_02138.png 110 | whucd_03104.png 111 | whucd_06500.png 112 | whucd_03979.png 113 | whucd_06504.png 114 | whucd_03279.png 115 | whucd_04868.png 116 | whucd_00437.png 117 | whucd_03851.png 118 | whucd_04112.png 119 | whucd_04117.png 120 | whucd_04369.png 121 | whucd_04493.png 122 | whucd_03927.png 123 | whucd_07146.png 124 | whucd_07412.png 125 | whucd_03043.png 126 | whucd_03153.png 127 | whucd_05861.png 128 | whucd_04121.png 129 | whucd_03748.png 130 | whucd_06903.png 131 | whucd_00867.png 132 | whucd_03114.png 133 | whucd_02524.png 134 | whucd_06333.png 135 | whucd_01093.png 136 | whucd_06556.png 137 | whucd_05107.png 138 | whucd_05441.png 139 | whucd_04863.png 140 | whucd_00840.png 141 | whucd_02985.png 142 | whucd_04329.png 143 | whucd_04727.png 144 | whucd_07167.png 145 | whucd_03425.png 146 | whucd_05620.png 147 | whucd_00109.png 148 | whucd_00392.png 149 | whucd_03238.png 150 | whucd_04724.png 151 | whucd_06467.png 152 | whucd_02903.png 153 | whucd_02251.png 154 | whucd_06225.png 155 | whucd_04473.png 156 | whucd_00198.png 157 | whucd_04096.png 158 | whucd_06788.png 159 | whucd_00483.png 160 | whucd_03792.png 161 | whucd_05110.png 162 | whucd_03791.png 163 | whucd_03117.png 164 | whucd_01864.png 165 | whucd_03155.png 166 | whucd_05365.png 167 | whucd_03837.png 168 | whucd_04927.png 169 | whucd_04226.png 170 | whucd_03964.png 171 | whucd_06316.png 172 | whucd_02230.png 173 | whucd_07402.png 174 | whucd_04980.png 175 | whucd_05217.png 176 | whucd_00949.png 177 | whucd_04859.png 178 | whucd_03976.png 179 | whucd_03843.png 180 | whucd_05874.png 181 | whucd_00235.png 182 | whucd_06020.png 183 | whucd_01298.png 184 | whucd_06394.png 185 | whucd_03180.png 186 | whucd_03316.png 187 | whucd_05218.png 188 | whucd_00231.png 189 | whucd_03289.png 190 | whucd_00482.png 191 | whucd_00571.png 192 | whucd_04993.png 193 | whucd_02987.png 194 | whucd_03593.png 195 | whucd_03866.png 196 | whucd_03713.png 197 | whucd_00190.png 198 | whucd_04738.png 199 | whucd_04983.png 200 | whucd_00141.png 201 | whucd_03424.png 202 | whucd_06189.png 203 | whucd_03720.png 204 | whucd_07149.png 205 | whucd_01450.png 206 | whucd_04485.png 207 | whucd_04723.png 208 | whucd_04851.png 209 | whucd_04618.png 210 | whucd_05433.png 211 | whucd_04206.png 212 | whucd_01217.png 213 | whucd_06307.png 214 | whucd_07142.png 215 | whucd_06890.png 216 | whucd_07152.png 217 | whucd_01079.png 218 | whucd_07321.png 219 | whucd_01165.png 220 | whucd_05723.png 221 | whucd_04310.png 222 | whucd_01506.png 223 | whucd_03284.png 224 | whucd_05305.png 225 | whucd_05553.png 226 | whucd_04091.png 227 | whucd_05248.png 228 | whucd_01324.png 229 | whucd_03999.png 230 | whucd_06272.png 231 | whucd_03530.png 232 | whucd_00227.png 233 | whucd_03579.png 234 | whucd_03493.png 235 | whucd_06875.png 236 | whucd_01164.png 237 | whucd_06912.png 238 | whucd_01912.png 239 | whucd_05128.png 240 | whucd_06910.png 241 | whucd_05234.png 242 | whucd_02354.png 243 | whucd_04691.png 244 | whucd_06254.png 245 | whucd_04216.png 246 | whucd_05133.png 247 | whucd_06666.png 248 | whucd_01861.png 249 | whucd_01431.png 250 | whucd_06268.png 251 | whucd_04849.png 252 | whucd_01127.png 253 | whucd_03112.png 254 | whucd_05001.png 255 | whucd_04619.png 256 | whucd_07394.png 257 | whucd_03475.png 258 | whucd_03657.png 259 | whucd_05122.png 260 | whucd_01421.png 261 | whucd_06045.png 262 | whucd_02018.png 263 | whucd_06576.png 264 | whucd_05362.png 265 | whucd_04611.png 266 | whucd_06400.png 267 | whucd_05494.png 268 | whucd_00429.png 269 | whucd_01509.png 270 | whucd_05314.png 271 | whucd_05831.png 272 | whucd_06148.png 273 | whucd_01380.png 274 | whucd_05369.png 275 | whucd_06922.png 276 | whucd_01204.png 277 | whucd_07137.png 278 | whucd_05237.png 279 | whucd_07269.png 280 | whucd_04994.png 281 | whucd_01798.png 282 | whucd_03712.png 283 | whucd_06021.png 284 | whucd_03677.png 285 | whucd_07017.png 286 | whucd_05000.png 287 | whucd_06376.png 288 | whucd_06315.png 289 | whucd_00422.png 290 | whucd_00412.png 291 | whucd_02036.png 292 | whucd_05623.png 293 | whucd_04614.png 294 | whucd_04045.png 295 | whucd_02872.png 296 | whucd_06243.png 297 | whucd_05239.png 298 | whucd_07041.png 299 | whucd_02983.png 300 | whucd_06382.png 301 | whucd_06770.png 302 | whucd_05367.png 303 | whucd_05619.png 304 | whucd_03538.png 305 | whucd_03848.png 306 | whucd_01598.png 307 | whucd_07006.png 308 | whucd_00812.png 309 | whucd_00942.png 310 | whucd_04854.png 311 | whucd_00128.png 312 | whucd_02884.png 313 | whucd_04753.png 314 | whucd_04987.png 315 | whucd_06880.png 316 | whucd_00150.png 317 | whucd_00171.png 318 | whucd_01119.png 319 | whucd_03474.png 320 | whucd_00068.png 321 | whucd_03915.png 322 | whucd_04348.png 323 | whucd_05894.png 324 | whucd_01091.png 325 | whucd_03914.png 326 | whucd_02730.png 327 | whucd_03789.png 328 | whucd_06643.png 329 | whucd_01209.png 330 | whucd_02523.png 331 | whucd_03966.png 332 | whucd_01436.png 333 | whucd_00382.png 334 | whucd_01019.png 335 | whucd_06095.png 336 | whucd_06116.png 337 | whucd_03189.png 338 | whucd_00153.png 339 | whucd_03663.png 340 | whucd_00359.png 341 | whucd_05429.png 342 | whucd_00677.png 343 | whucd_07267.png 344 | whucd_00339.png 345 | whucd_01865.png 346 | whucd_02990.png 347 | whucd_06629.png 348 | whucd_01041.png 349 | whucd_07194.png 350 | whucd_05499.png 351 | whucd_00297.png 352 | whucd_07074.png 353 | whucd_01186.png 354 | whucd_06115.png 355 | whucd_01923.png 356 | whucd_02319.png 357 | whucd_04313.png 358 | whucd_03110.png 359 | whucd_01572.png 360 | whucd_00191.png 361 | whucd_00201.png 362 | whucd_06652.png 363 | whucd_05862.png 364 | whucd_04988.png 365 | whucd_03163.png 366 | whucd_02796.png 367 | whucd_02948.png 368 | whucd_06383.png 369 | whucd_00744.png 370 | whucd_03659.png 371 | whucd_04985.png 372 | whucd_05700.png 373 | whucd_07002.png 374 | whucd_02409.png 375 | whucd_06785.png 376 | whucd_05376.png 377 | whucd_01062.png 378 | whucd_00950.png 379 | whucd_02102.png 380 | whucd_02857.png 381 | whucd_05366.png 382 | whucd_02382.png 383 | whucd_04874.png 384 | whucd_04218.png 385 | whucd_02378.png 386 | whucd_03183.png 387 | whucd_01180.png 388 | whucd_03469.png 389 | whucd_02598.png 390 | whucd_04247.png 391 | whucd_05108.png 392 | whucd_06323.png 393 | whucd_00051.png 394 | whucd_05054.png 395 | whucd_03972.png 396 | whucd_03901.png 397 | whucd_00691.png 398 | whucd_02135.png 399 | whucd_01997.png 400 | whucd_04599.png 401 | whucd_03597.png 402 | whucd_03788.png 403 | whucd_01117.png 404 | whucd_00549.png 405 | whucd_06401.png 406 | whucd_05372.png 407 | whucd_06480.png 408 | whucd_05375.png 409 | whucd_07128.png 410 | whucd_00627.png 411 | whucd_06891.png 412 | whucd_03963.png 413 | whucd_05390.png 414 | whucd_04100.png 415 | whucd_01009.png 416 | whucd_03622.png 417 | whucd_00353.png 418 | whucd_00638.png 419 | whucd_00572.png 420 | whucd_04996.png 421 | whucd_03965.png 422 | whucd_06711.png 423 | whucd_04223.png 424 | whucd_01188.png 425 | whucd_06322.png 426 | whucd_00203.png 427 | whucd_00391.png 428 | whucd_01310.png 429 | whucd_03552.png 430 | whucd_06790.png 431 | whucd_01169.png 432 | whucd_02829.png 433 | whucd_00420.png 434 | whucd_06143.png 435 | whucd_05748.png 436 | whucd_00062.png 437 | whucd_01032.png 438 | whucd_02143.png 439 | whucd_03827.png 440 | whucd_05036.png 441 | whucd_03115.png 442 | whucd_01039.png 443 | whucd_07159.png 444 | whucd_03074.png 445 | whucd_06538.png 446 | whucd_00347.png 447 | whucd_04602.png 448 | whucd_05706.png 449 | whucd_00517.png 450 | whucd_07161.png 451 | whucd_00502.png 452 | whucd_04612.png 453 | whucd_03403.png 454 | whucd_00393.png 455 | whucd_05580.png 456 | whucd_06717.png 457 | whucd_07305.png 458 | whucd_05242.png 459 | whucd_03406.png 460 | whucd_06451.png 461 | whucd_05897.png 462 | whucd_06876.png 463 | whucd_00163.png 464 | whucd_07213.png 465 | whucd_05111.png 466 | whucd_02383.png 467 | whucd_03955.png 468 | whucd_01218.png 469 | whucd_03632.png 470 | whucd_01034.png 471 | whucd_04468.png 472 | whucd_06372.png 473 | whucd_06718.png 474 | whucd_04234.png 475 | whucd_00924.png 476 | whucd_07022.png 477 | whucd_03844.png 478 | whucd_00879.png 479 | whucd_07287.png 480 | whucd_05115.png 481 | whucd_01178.png 482 | whucd_05468.png 483 | whucd_03621.png 484 | whucd_02012.png 485 | whucd_07143.png 486 | whucd_07196.png 487 | whucd_04115.png 488 | whucd_06917.png 489 | whucd_04053.png 490 | whucd_05496.png 491 | whucd_00085.png 492 | whucd_05443.png 493 | whucd_01849.png 494 | whucd_06501.png 495 | whucd_07162.png 496 | whucd_01867.png 497 | whucd_07389.png 498 | whucd_03728.png 499 | whucd_04243.png 500 | whucd_03154.png 501 | whucd_04439.png 502 | whucd_03660.png 503 | whucd_05990.png 504 | whucd_03588.png 505 | whucd_06319.png 506 | whucd_04975.png 507 | whucd_00566.png 508 | whucd_01290.png 509 | whucd_02071.png 510 | whucd_00910.png 511 | whucd_05698.png 512 | whucd_04347.png 513 | whucd_03994.png 514 | whucd_03067.png 515 | whucd_04998.png 516 | whucd_01060.png 517 | whucd_06341.png 518 | whucd_04484.png 519 | whucd_07043.png 520 | whucd_04728.png 521 | whucd_00887.png 522 | whucd_00110.png 523 | whucd_03124.png 524 | whucd_07199.png 525 | whucd_04593.png 526 | whucd_01430.png 527 | whucd_06502.png 528 | whucd_03839.png 529 | whucd_05485.png 530 | whucd_01343.png 531 | whucd_01976.png 532 | whucd_00657.png 533 | whucd_00587.png 534 | whucd_00965.png 535 | whucd_05124.png 536 | whucd_04865.png 537 | whucd_04328.png 538 | whucd_07003.png 539 | whucd_02586.png 540 | whucd_03710.png 541 | whucd_07036.png 542 | whucd_01219.png 543 | whucd_06559.png 544 | whucd_04248.png 545 | whucd_06787.png 546 | whucd_06591.png 547 | whucd_01159.png 548 | whucd_01547.png 549 | whucd_07069.png 550 | whucd_06824.png 551 | whucd_04839.png 552 | whucd_04565.png 553 | whucd_03585.png 554 | whucd_00751.png 555 | whucd_01220.png 556 | whucd_02837.png 557 | whucd_02379.png 558 | whucd_03116.png 559 | whucd_00195.png 560 | whucd_02320.png 561 | whucd_03803.png 562 | whucd_06792.png 563 | whucd_00743.png 564 | whucd_05445.png 565 | whucd_00346.png 566 | whucd_01672.png 567 | whucd_05860.png 568 | whucd_04039.png 569 | whucd_06882.png 570 | whucd_05166.png 571 | whucd_05686.png 572 | whucd_06911.png 573 | whucd_05660.png 574 | whucd_01190.png 575 | whucd_04991.png 576 | whucd_05869.png 577 | whucd_00335.png 578 | whucd_05235.png 579 | whucd_01296.png 580 | whucd_04494.png 581 | whucd_01383.png 582 | whucd_04221.png 583 | whucd_03960.png 584 | whucd_02883.png 585 | whucd_02861.png 586 | whucd_04474.png 587 | whucd_07275.png 588 | whucd_02350.png 589 | whucd_04342.png 590 | whucd_00539.png 591 | whucd_04220.png 592 | whucd_03666.png 593 | whucd_04999.png 594 | whucd_04951.png 595 | whucd_07075.png 596 | whucd_05738.png 597 | whucd_04106.png 598 | whucd_01293.png 599 | whucd_05109.png 600 | whucd_07048.png 601 | whucd_07289.png 602 | whucd_02677.png 603 | whucd_05259.png 604 | whucd_05487.png 605 | -------------------------------------------------------------------------------- /data/WHU-CD-List/40_train_unsupervised.txt: -------------------------------------------------------------------------------- 1 | whucd_00022.png 2 | whucd_03551.png 3 | whucd_01924.png 4 | whucd_05689.png 5 | whucd_04027.png 6 | whucd_04989.png 7 | whucd_07417.png 8 | whucd_01471.png 9 | whucd_04811.png 10 | whucd_02852.png 11 | whucd_00332.png 12 | whucd_00413.png 13 | whucd_06403.png 14 | whucd_02881.png 15 | whucd_03902.png 16 | whucd_06867.png 17 | whucd_07265.png 18 | whucd_03918.png 19 | whucd_06789.png 20 | whucd_04626.png 21 | whucd_03041.png 22 | whucd_04977.png 23 | whucd_07140.png 24 | whucd_00771.png 25 | whucd_02137.png 26 | whucd_03460.png 27 | whucd_07386.png 28 | whucd_04847.png 29 | whucd_07390.png 30 | whucd_05704.png 31 | whucd_03111.png 32 | whucd_03314.png 33 | whucd_06624.png 34 | whucd_03740.png 35 | whucd_06255.png 36 | whucd_06913.png 37 | whucd_03674.png 38 | whucd_00256.png 39 | whucd_07148.png 40 | whucd_02441.png 41 | whucd_04873.png 42 | whucd_04966.png 43 | whucd_03912.png 44 | whucd_04245.png 45 | whucd_02923.png 46 | whucd_00309.png 47 | whucd_02665.png 48 | whucd_04469.png 49 | whucd_03476.png 50 | whucd_02128.png 51 | whucd_02989.png 52 | whucd_02038.png 53 | whucd_02984.png 54 | whucd_06499.png 55 | whucd_05066.png 56 | whucd_04850.png 57 | whucd_04476.png 58 | whucd_01038.png 59 | whucd_04309.png 60 | whucd_07255.png 61 | whucd_00503.png 62 | whucd_04840.png 63 | whucd_03926.png 64 | whucd_03113.png 65 | whucd_06665.png 66 | whucd_02567.png 67 | whucd_05240.png 68 | whucd_06655.png 69 | whucd_04752.png 70 | whucd_01306.png 71 | whucd_01562.png 72 | whucd_07393.png 73 | whucd_02396.png 74 | whucd_05491.png 75 | whucd_00819.png 76 | whucd_05130.png 77 | whucd_03990.png 78 | whucd_02622.png 79 | whucd_02106.png 80 | whucd_00586.png 81 | whucd_00029.png 82 | whucd_03706.png 83 | whucd_06784.png 84 | whucd_01295.png 85 | whucd_04735.png 86 | whucd_03723.png 87 | whucd_04487.png 88 | whucd_07266.png 89 | whucd_00333.png 90 | whucd_03969.png 91 | whucd_04001.png 92 | whucd_01181.png 93 | whucd_04625.png 94 | whucd_04559.png 95 | whucd_03716.png 96 | whucd_05517.png 97 | whucd_03053.png 98 | whucd_05865.png 99 | whucd_05371.png 100 | whucd_00189.png 101 | whucd_04178.png 102 | whucd_04344.png 103 | whucd_05373.png 104 | whucd_02739.png 105 | whucd_04722.png 106 | whucd_03995.png 107 | whucd_06528.png 108 | whucd_05232.png 109 | whucd_01010.png 110 | whucd_05687.png 111 | whucd_00712.png 112 | whucd_03342.png 113 | whucd_06375.png 114 | whucd_07024.png 115 | whucd_07286.png 116 | whucd_06626.png 117 | whucd_07254.png 118 | whucd_01740.png 119 | whucd_05863.png 120 | whucd_01468.png 121 | whucd_00303.png 122 | whucd_05753.png 123 | whucd_02713.png 124 | whucd_01446.png 125 | whucd_02745.png 126 | whucd_06918.png 127 | whucd_01294.png 128 | whucd_01975.png 129 | whucd_02726.png 130 | whucd_06914.png 131 | whucd_01229.png 132 | whucd_00598.png 133 | whucd_01470.png 134 | whucd_00745.png 135 | whucd_00947.png 136 | whucd_06774.png 137 | whucd_02103.png 138 | whucd_02218.png 139 | whucd_07163.png 140 | whucd_03278.png 141 | whucd_05769.png 142 | whucd_03992.png 143 | whucd_02315.png 144 | whucd_04028.png 145 | whucd_01059.png 146 | whucd_05057.png 147 | whucd_01160.png 148 | whucd_02725.png 149 | whucd_05940.png 150 | whucd_00218.png 151 | whucd_06306.png 152 | whucd_07129.png 153 | whucd_00362.png 154 | whucd_04229.png 155 | whucd_03913.png 156 | whucd_05578.png 157 | whucd_06441.png 158 | whucd_01411.png 159 | whucd_03727.png 160 | whucd_04737.png 161 | whucd_04187.png 162 | whucd_00991.png 163 | whucd_00693.png 164 | whucd_04433.png 165 | whucd_00772.png 166 | whucd_05112.png 167 | whucd_06539.png 168 | whucd_01563.png 169 | whucd_04872.png 170 | whucd_06149.png 171 | whucd_01331.png 172 | whucd_05313.png 173 | whucd_04595.png 174 | whucd_06915.png 175 | whucd_03756.png 176 | whucd_04699.png 177 | whucd_06017.png 178 | whucd_01166.png 179 | whucd_02758.png 180 | whucd_05004.png 181 | whucd_05895.png 182 | whucd_01410.png 183 | whucd_00547.png 184 | whucd_03052.png 185 | whucd_03726.png 186 | whucd_04005.png 187 | whucd_03852.png 188 | whucd_00567.png 189 | whucd_04467.png 190 | whucd_04875.png 191 | whucd_03494.png 192 | whucd_04357.png 193 | whucd_03724.png 194 | whucd_05500.png 195 | whucd_02091.png 196 | whucd_03049.png 197 | whucd_04000.png 198 | whucd_03529.png 199 | whucd_01185.png 200 | whucd_04222.png 201 | whucd_03962.png 202 | whucd_04597.png 203 | whucd_04697.png 204 | whucd_04621.png 205 | whucd_05368.png 206 | whucd_04741.png 207 | whucd_05719.png 208 | whucd_00308.png 209 | whucd_02682.png 210 | whucd_02827.png 211 | whucd_06962.png 212 | whucd_05694.png 213 | whucd_00434.png 214 | whucd_04423.png 215 | whucd_04114.png 216 | whucd_03744.png 217 | whucd_06892.png 218 | whucd_07144.png 219 | whucd_01723.png 220 | whucd_02192.png 221 | whucd_02986.png 222 | whucd_00421.png 223 | whucd_05819.png 224 | whucd_03865.png 225 | whucd_01292.png 226 | whucd_06898.png 227 | whucd_00423.png 228 | whucd_02346.png 229 | whucd_01415.png 230 | whucd_00485.png 231 | whucd_03491.png 232 | whucd_04944.png 233 | whucd_07071.png 234 | whucd_06791.png 235 | whucd_02286.png 236 | whucd_05427.png 237 | whucd_04219.png 238 | whucd_02605.png 239 | whucd_00370.png 240 | whucd_06273.png 241 | whucd_04978.png 242 | whucd_05228.png 243 | whucd_06023.png 244 | whucd_04120.png 245 | whucd_03715.png 246 | whucd_06369.png 247 | whucd_06896.png 248 | whucd_02587.png 249 | whucd_03673.png 250 | whucd_05116.png 251 | whucd_04179.png 252 | whucd_01334.png 253 | whucd_05315.png 254 | whucd_05257.png 255 | whucd_05444.png 256 | whucd_07356.png 257 | whucd_05890.png 258 | whucd_00435.png 259 | whucd_00486.png 260 | whucd_07274.png 261 | whucd_01597.png 262 | whucd_05121.png 263 | whucd_00356.png 264 | whucd_05574.png 265 | whucd_05867.png 266 | whucd_01379.png 267 | whucd_06541.png 268 | whucd_04202.png 269 | whucd_06663.png 270 | whucd_06251.png 271 | whucd_06772.png 272 | whucd_07174.png 273 | whucd_03157.png 274 | whucd_05416.png 275 | whucd_00221.png 276 | whucd_03075.png 277 | whucd_06124.png 278 | whucd_01090.png 279 | whucd_05195.png 280 | whucd_00746.png 281 | whucd_06150.png 282 | whucd_07072.png 283 | whucd_07018.png 284 | whucd_04067.png 285 | whucd_02866.png 286 | whucd_05470.png 287 | whucd_06068.png 288 | whucd_03977.png 289 | whucd_06384.png 290 | whucd_06190.png 291 | whucd_03956.png 292 | whucd_07387.png 293 | whucd_02423.png 294 | whucd_02859.png 295 | whucd_04232.png 296 | whucd_05370.png 297 | whucd_00318.png 298 | whucd_04725.png 299 | whucd_00944.png 300 | whucd_03747.png 301 | whucd_07138.png 302 | whucd_06377.png 303 | whucd_06503.png 304 | whucd_00570.png 305 | whucd_02310.png 306 | whucd_02447.png 307 | whucd_04217.png 308 | whucd_02136.png 309 | whucd_06595.png 310 | whucd_04852.png 311 | whucd_04566.png 312 | whucd_07038.png 313 | whucd_05304.png 314 | whucd_06117.png 315 | whucd_00334.png 316 | whucd_05113.png 317 | whucd_02272.png 318 | whucd_04862.png 319 | whucd_02262.png 320 | whucd_06555.png 321 | whucd_05768.png 322 | whucd_01467.png 323 | whucd_06623.png 324 | whucd_02123.png 325 | whucd_05699.png 326 | whucd_02927.png 327 | whucd_02993.png 328 | whucd_06426.png 329 | whucd_01172.png 330 | whucd_05247.png 331 | whucd_06708.png 332 | whucd_03883.png 333 | whucd_01286.png 334 | whucd_04743.png 335 | whucd_00181.png 336 | whucd_04302.png 337 | whucd_04131.png 338 | whucd_03371.png 339 | whucd_03162.png 340 | whucd_02479.png 341 | whucd_07013.png 342 | whucd_01474.png 343 | whucd_04101.png 344 | whucd_06450.png 345 | whucd_02118.png 346 | whucd_02949.png 347 | whucd_05364.png 348 | whucd_04867.png 349 | whucd_06529.png 350 | whucd_02675.png 351 | whucd_04721.png 352 | whucd_02603.png 353 | whucd_02353.png 354 | whucd_01014.png 355 | whucd_04095.png 356 | whucd_03407.png 357 | whucd_05944.png 358 | whucd_02548.png 359 | whucd_03531.png 360 | whucd_06270.png 361 | whucd_06837.png 362 | whucd_04358.png 363 | whucd_06778.png 364 | whucd_05236.png 365 | whucd_07035.png 366 | whucd_00618.png 367 | whucd_01993.png 368 | whucd_06698.png 369 | whucd_02090.png 370 | whucd_02735.png 371 | whucd_06018.png 372 | whucd_03066.png 373 | whucd_02858.png 374 | whucd_07087.png 375 | whucd_01896.png 376 | whucd_07042.png 377 | whucd_06521.png 378 | whucd_05498.png 379 | whucd_01158.png 380 | whucd_01123.png 381 | whucd_04477.png 382 | whucd_04498.png 383 | whucd_06508.png 384 | whucd_06274.png 385 | whucd_05814.png 386 | whucd_00440.png 387 | whucd_02731.png 388 | whucd_03619.png 389 | whucd_02431.png 390 | whucd_01214.png 391 | whucd_04098.png 392 | whucd_00202.png 393 | whucd_02729.png 394 | whucd_03842.png 395 | whucd_02795.png 396 | whucd_06253.png 397 | whucd_06498.png 398 | whucd_00724.png 399 | whucd_07044.png 400 | whucd_04931.png 401 | whucd_06809.png 402 | whucd_06899.png 403 | whucd_00708.png 404 | whucd_02855.png 405 | whucd_03874.png 406 | whucd_00866.png 407 | whucd_05309.png 408 | whucd_00585.png 409 | whucd_03854.png 410 | whucd_01171.png 411 | whucd_06442.png 412 | whucd_06373.png 413 | whucd_03240.png 414 | whucd_05998.png 415 | whucd_06537.png 416 | whucd_07178.png 417 | whucd_01202.png 418 | whucd_06897.png 419 | whucd_03470.png 420 | whucd_01596.png 421 | whucd_02429.png 422 | whucd_05991.png 423 | whucd_03596.png 424 | whucd_04623.png 425 | whucd_02653.png 426 | whucd_03315.png 427 | whucd_02011.png 428 | whucd_02977.png 429 | whucd_02184.png 430 | whucd_06901.png 431 | whucd_02104.png 432 | whucd_04992.png 433 | whucd_00822.png 434 | whucd_02992.png 435 | whucd_01416.png 436 | whucd_04102.png 437 | whucd_01437.png 438 | whucd_03339.png 439 | whucd_02856.png 440 | whucd_02189.png 441 | whucd_02120.png 442 | whucd_01319.png 443 | whucd_04616.png 444 | whucd_04352.png 445 | whucd_06304.png 446 | whucd_02873.png 447 | whucd_00883.png 448 | whucd_05765.png 449 | whucd_07334.png 450 | whucd_03242.png 451 | whucd_03718.png 452 | whucd_02165.png 453 | whucd_02217.png 454 | whucd_04368.png 455 | whucd_02305.png 456 | whucd_03600.png 457 | whucd_00770.png 458 | whucd_00481.png 459 | whucd_04350.png 460 | whucd_04594.png 461 | whucd_03662.png 462 | whucd_07278.png 463 | whucd_05871.png 464 | whucd_00827.png 465 | whucd_06275.png 466 | whucd_02688.png 467 | whucd_00538.png 468 | whucd_03323.png 469 | whucd_07166.png 470 | whucd_00155.png 471 | whucd_02733.png 472 | whucd_04113.png 473 | whucd_00823.png 474 | whucd_02460.png 475 | whucd_00454.png 476 | whucd_06368.png 477 | whucd_03215.png 478 | whucd_03408.png 479 | whucd_00188.png 480 | whucd_06414.png 481 | whucd_06430.png 482 | whucd_03503.png 483 | whucd_03367.png 484 | whucd_04751.png 485 | whucd_03745.png 486 | whucd_04077.png 487 | whucd_03196.png 488 | whucd_05693.png 489 | whucd_05621.png 490 | whucd_02863.png 491 | whucd_03721.png 492 | whucd_03343.png 493 | whucd_03722.png 494 | whucd_07200.png 495 | whucd_07325.png 496 | whucd_06119.png 497 | whucd_03126.png 498 | whucd_05246.png 499 | whucd_07416.png 500 | whucd_05439.png 501 | whucd_00699.png 502 | whucd_06448.png 503 | whucd_00445.png 504 | whucd_02183.png 505 | whucd_07007.png 506 | whucd_03366.png 507 | whucd_04227.png 508 | whucd_05320.png 509 | whucd_06343.png 510 | whucd_04353.png 511 | whucd_00499.png 512 | whucd_05598.png 513 | whucd_04203.png 514 | whucd_02602.png 515 | whucd_06518.png 516 | whucd_02127.png 517 | whucd_00853.png 518 | whucd_04351.png 519 | whucd_02982.png 520 | whucd_04976.png 521 | whucd_03459.png 522 | whucd_01426.png 523 | whucd_00548.png 524 | whucd_04877.png 525 | whucd_01473.png 526 | whucd_00709.png 527 | whucd_06078.png 528 | whucd_03847.png 529 | whucd_00444.png 530 | whucd_00410.png 531 | whucd_03786.png 532 | whucd_07160.png 533 | whucd_07023.png 534 | whucd_05291.png 535 | whucd_07327.png 536 | whucd_03781.png 537 | whucd_06371.png 538 | whucd_05131.png 539 | whucd_04940.png 540 | whucd_05360.png 541 | whucd_05363.png 542 | whucd_03587.png 543 | whucd_07070.png 544 | whucd_07139.png 545 | whucd_02798.png 546 | whucd_01828.png 547 | whucd_05186.png 548 | whucd_01305.png 549 | whucd_03676.png 550 | whucd_03243.png 551 | whucd_04105.png 552 | whucd_02882.png 553 | whucd_00316.png 554 | whucd_01037.png 555 | whucd_02785.png 556 | whucd_07391.png 557 | whucd_05516.png 558 | whucd_02044.png 559 | whucd_03544.png 560 | whucd_00946.png 561 | whucd_01859.png 562 | whucd_05106.png 563 | whucd_07127.png 564 | whucd_00851.png 565 | whucd_06648.png 566 | whucd_01739.png 567 | whucd_03800.png 568 | whucd_03802.png 569 | whucd_05102.png 570 | whucd_03891.png 571 | whucd_06320.png 572 | whucd_05876.png 573 | whucd_04878.png 574 | whucd_02245.png 575 | whucd_04240.png 576 | whucd_01860.png 577 | whucd_01892.png 578 | whucd_01089.png 579 | whucd_00909.png 580 | whucd_04093.png 581 | whucd_03216.png 582 | whucd_01189.png 583 | whucd_02932.png 584 | whucd_04372.png 585 | whucd_01469.png 586 | whucd_04237.png 587 | whucd_02097.png 588 | whucd_03468.png 589 | whucd_01344.png 590 | whucd_04499.png 591 | whucd_01314.png 592 | whucd_03658.png 593 | whucd_04177.png 594 | whucd_07291.png 595 | whucd_04853.png 596 | whucd_07180.png 597 | whucd_02050.png 598 | whucd_02297.png 599 | whucd_03804.png 600 | whucd_04567.png 601 | whucd_05250.png 602 | whucd_00639.png 603 | whucd_03449.png 604 | whucd_00817.png 605 | whucd_00354.png 606 | whucd_04017.png 607 | whucd_06889.png 608 | whucd_07273.png 609 | whucd_00475.png 610 | whucd_05816.png 611 | whucd_06121.png 612 | whucd_04869.png 613 | whucd_02737.png 614 | whucd_02555.png 615 | whucd_02295.png 616 | whucd_02851.png 617 | whucd_04997.png 618 | whucd_02169.png 619 | whucd_02950.png 620 | whucd_00565.png 621 | whucd_03281.png 622 | whucd_06628.png 623 | whucd_04501.png 624 | whucd_02397.png 625 | whucd_02349.png 626 | whucd_07026.png 627 | whucd_03970.png 628 | whucd_06750.png 629 | whucd_02801.png 630 | whucd_05735.png 631 | whucd_03584.png 632 | whucd_03793.png 633 | whucd_05231.png 634 | whucd_04692.png 635 | whucd_04685.png 636 | whucd_05875.png 637 | whucd_02119.png 638 | whucd_03599.png 639 | whucd_04739.png 640 | whucd_04303.png 641 | whucd_04603.png 642 | whucd_04740.png 643 | whucd_01513.png 644 | whucd_02732.png 645 | whucd_01915.png 646 | whucd_02862.png 647 | whucd_00546.png 648 | whucd_06662.png 649 | whucd_01505.png 650 | whucd_00338.png 651 | whucd_06265.png 652 | whucd_05241.png 653 | whucd_02556.png 654 | whucd_01341.png 655 | whucd_06370.png 656 | whucd_03790.png 657 | whucd_05820.png 658 | whucd_06242.png 659 | whucd_04241.png 660 | whucd_01423.png 661 | whucd_02294.png 662 | whucd_01005.png 663 | whucd_00523.png 664 | whucd_06396.png 665 | whucd_06222.png 666 | whucd_07037.png 667 | whucd_05118.png 668 | whucd_06070.png 669 | whucd_00220.png 670 | whucd_02166.png 671 | whucd_01709.png 672 | whucd_02805.png 673 | whucd_02673.png 674 | whucd_03841.png 675 | whucd_03878.png 676 | whucd_00629.png 677 | whucd_00228.png 678 | whucd_03239.png 679 | whucd_00700.png 680 | whucd_02351.png 681 | whucd_04627.png 682 | whucd_03495.png 683 | whucd_02922.png 684 | whucd_00101.png 685 | whucd_02748.png 686 | whucd_01063.png 687 | whucd_04818.png 688 | whucd_01103.png 689 | whucd_04103.png 690 | whucd_03911.png 691 | whucd_06140.png 692 | whucd_04297.png 693 | whucd_02712.png 694 | whucd_01977.png 695 | whucd_06916.png 696 | whucd_01113.png 697 | whucd_06751.png 698 | whucd_04610.png 699 | whucd_03497.png 700 | whucd_04995.png 701 | whucd_00820.png 702 | whucd_07292.png 703 | whucd_02803.png 704 | whucd_05492.png 705 | whucd_05767.png 706 | whucd_02728.png 707 | whucd_02228.png 708 | whucd_02787.png 709 | whucd_06191.png 710 | whucd_05342.png 711 | whucd_06667.png 712 | whucd_03741.png 713 | whucd_02040.png 714 | whucd_05256.png 715 | whucd_02921.png 716 | whucd_02759.png 717 | whucd_04354.png 718 | whucd_06398.png 719 | whucd_06123.png 720 | whucd_00024.png 721 | whucd_03506.png 722 | whucd_05230.png 723 | whucd_05358.png 724 | whucd_05355.png 725 | whucd_03757.png 726 | whucd_04238.png 727 | whucd_03492.png 728 | whucd_05679.png 729 | whucd_02681.png 730 | whucd_06433.png 731 | whucd_02668.png 732 | whucd_01458.png 733 | whucd_02105.png 734 | whucd_06381.png 735 | whucd_04052.png 736 | whucd_06523.png 737 | whucd_03853.png 738 | whucd_01336.png 739 | whucd_03195.png 740 | whucd_07268.png 741 | whucd_00498.png 742 | whucd_05374.png 743 | whucd_00464.png 744 | whucd_01323.png 745 | whucd_05184.png 746 | whucd_01866.png 747 | whucd_04307.png 748 | whucd_03845.png 749 | whucd_02669.png 750 | whucd_01571.png 751 | whucd_06749.png 752 | whucd_06227.png 753 | whucd_07021.png 754 | whucd_06683.png 755 | whucd_03537.png 756 | whucd_04495.png 757 | whucd_01680.png 758 | whucd_04720.png 759 | whucd_05832.png 760 | whucd_02160.png 761 | whucd_04092.png 762 | whucd_02445.png 763 | whucd_06946.png 764 | whucd_06374.png 765 | whucd_01345.png 766 | whucd_05132.png 767 | whucd_04876.png 768 | whucd_02168.png 769 | whucd_00575.png 770 | whucd_01168.png 771 | whucd_04781.png 772 | whucd_05941.png 773 | whucd_01304.png 774 | whucd_01346.png 775 | whucd_01387.png 776 | whucd_07414.png 777 | whucd_00194.png 778 | whucd_03967.png 779 | whucd_05809.png 780 | whucd_06432.png 781 | whucd_07293.png 782 | whucd_00599.png 783 | whucd_00959.png 784 | whucd_04346.png 785 | whucd_06764.png 786 | whucd_04242.png 787 | whucd_02550.png 788 | whucd_06664.png 789 | whucd_02318.png 790 | whucd_05229.png 791 | whucd_06378.png 792 | whucd_00713.png 793 | whucd_01671.png 794 | whucd_03919.png 795 | whucd_05243.png 796 | whucd_01944.png 797 | whucd_04736.png 798 | whucd_03629.png 799 | whucd_04861.png 800 | whucd_01581.png 801 | whucd_04343.png 802 | whucd_03280.png 803 | whucd_05996.png 804 | whucd_02865.png 805 | whucd_04604.png 806 | whucd_03595.png 807 | whucd_00628.png 808 | whucd_01347.png 809 | whucd_02004.png 810 | whucd_03586.png 811 | whucd_03235.png 812 | whucd_03850.png 813 | whucd_03991.png 814 | whucd_07195.png 815 | whucd_00504.png 816 | whucd_02185.png 817 | whucd_06022.png 818 | whucd_04986.png 819 | whucd_05120.png 820 | whucd_03589.png 821 | whucd_04857.png 822 | whucd_05114.png 823 | whucd_04031.png 824 | whucd_00697.png 825 | whucd_03785.png 826 | whucd_02988.png 827 | whucd_07307.png 828 | whucd_07288.png 829 | whucd_04224.png 830 | whucd_01508.png 831 | whucd_02472.png 832 | whucd_01340.png 833 | whucd_01335.png 834 | whucd_07415.png 835 | whucd_07418.png 836 | whucd_00050.png 837 | whucd_04622.png 838 | whucd_05490.png 839 | whucd_05642.png 840 | whucd_01940.png 841 | whucd_01288.png 842 | whucd_00461.png 843 | whucd_02738.png 844 | whucd_06016.png 845 | whucd_03993.png 846 | whucd_01285.png 847 | whucd_02991.png 848 | whucd_05749.png 849 | whucd_06755.png 850 | whucd_04239.png 851 | whucd_05442.png 852 | whucd_01215.png 853 | whucd_06118.png 854 | whucd_04860.png 855 | whucd_06919.png 856 | whucd_04246.png 857 | whucd_02674.png 858 | whucd_01511.png 859 | whucd_00455.png 860 | whucd_01187.png 861 | whucd_00463.png 862 | whucd_03678.png 863 | whucd_03072.png 864 | whucd_06947.png 865 | whucd_05486.png 866 | whucd_05864.png 867 | whucd_00642.png 868 | whucd_03618.png 869 | whucd_05622.png 870 | whucd_06902.png 871 | whucd_04690.png 872 | whucd_01118.png 873 | whucd_05002.png 874 | whucd_06592.png 875 | whucd_00569.png 876 | whucd_04104.png 877 | whucd_03601.png 878 | whucd_01141.png 879 | whucd_00783.png 880 | whucd_01329.png 881 | whucd_04374.png 882 | whucd_05678.png 883 | whucd_07198.png 884 | whucd_03602.png 885 | whucd_01112.png 886 | whucd_00436.png 887 | whucd_01414.png 888 | whucd_03905.png 889 | whucd_01432.png 890 | whucd_01297.png 891 | whucd_02654.png 892 | whucd_03838.png 893 | whucd_02092.png 894 | whucd_05987.png 895 | whucd_04564.png 896 | whucd_00286.png 897 | whucd_03179.png 898 | whucd_07054.png 899 | whucd_03504.png 900 | whucd_04726.png 901 | whucd_01332.png 902 | whucd_06088.png 903 | whucd_01507.png 904 | whucd_05815.png 905 | whucd_02828.png 906 | whucd_07320.png 907 | whucd_05618.png 908 | whucd_06793.png 909 | -------------------------------------------------------------------------------- /data/WHU-CD-List/5_train_supervised.txt: -------------------------------------------------------------------------------- 1 | whucd_02547.png 2 | whucd_05868.png 3 | whucd_00871.png 4 | whucd_07001.png 5 | whucd_04331.png 6 | whucd_04228.png 7 | whucd_06246.png 8 | whucd_00630.png 9 | whucd_04199.png 10 | whucd_05344.png 11 | whucd_03872.png 12 | whucd_04598.png 13 | whucd_07147.png 14 | whucd_03283.png 15 | whucd_01681.png 16 | whucd_02180.png 17 | whucd_02309.png 18 | whucd_01313.png 19 | whucd_01438.png 20 | whucd_03711.png 21 | whucd_02864.png 22 | whucd_05053.png 23 | whucd_00738.png 24 | whucd_01140.png 25 | whucd_05244.png 26 | whucd_02540.png 27 | whucd_07290.png 28 | whucd_06515.png 29 | whucd_06065.png 30 | whucd_04359.png 31 | whucd_00076.png 32 | whucd_03974.png 33 | whucd_03150.png 34 | whucd_05995.png 35 | whucd_05258.png 36 | whucd_03620.png 37 | whucd_01722.png 38 | whucd_05123.png 39 | whucd_02224.png 40 | whucd_00142.png 41 | whucd_07335.png 42 | whucd_06469.png 43 | whucd_01092.png 44 | whucd_06397.png 45 | whucd_00361.png 46 | whucd_03534.png 47 | whucd_06881.png 48 | whucd_03846.png 49 | whucd_07276.png 50 | whucd_02978.png 51 | whucd_00574.png 52 | whucd_03907.png 53 | whucd_01850.png 54 | whucd_04125.png 55 | whucd_02663.png 56 | whucd_06647.png 57 | whucd_02711.png 58 | whucd_02480.png 59 | whucd_05992.png 60 | whucd_06413.png 61 | whucd_03505.png 62 | whucd_00443.png 63 | whucd_07392.png 64 | whucd_04111.png 65 | whucd_03340.png 66 | whucd_07039.png 67 | whucd_06895.png 68 | whucd_04094.png 69 | whucd_03466.png 70 | whucd_03867.png 71 | whucd_01065.png 72 | whucd_00694.png 73 | whucd_03042.png 74 | whucd_05891.png 75 | whucd_03103.png 76 | -------------------------------------------------------------------------------- /data/WHU-CD-List/WHU-CD.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/data/WHU-CD-List/WHU-CD.png -------------------------------------------------------------------------------- /data/__pycache__/semi_data.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/data/__pycache__/semi_data.cpython-38.pyc -------------------------------------------------------------------------------- /data/__pycache__/transform.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/data/__pycache__/transform.cpython-38.pyc -------------------------------------------------------------------------------- /data/generate_semicd_splits.py: -------------------------------------------------------------------------------- 1 | import os 2 | import numpy as np 3 | import math 4 | import random 5 | random.seed(10) 6 | 7 | file_list = os.path.join("train(org).txt") 8 | img_name_list = np.loadtxt(file_list, dtype=str) 9 | random.shuffle(img_name_list) 10 | 11 | N_total = len(img_name_list) 12 | print(f'Total number of images in the dataset: {N_total}') 13 | 14 | for per in range(10,90,10): 15 | print(f'===== Creating {per}% split =====') 16 | N_sup = math.floor((per/100)*len(img_name_list)) 17 | print(f'Number of supervised images: {N_sup}.') 18 | 19 | sup_img_name_list = img_name_list[0:N_sup] 20 | unsup_img_name_list = img_name_list[N_sup:] 21 | 22 | textfile_sup = open(str(per)+"_train_supervised.txt", "w") 23 | for element in sup_img_name_list: 24 | textfile_sup.write(element + "\n") 25 | textfile_sup.close() 26 | 27 | textfile_unsup = open(str(per)+"_train_unsupervised.txt", "w") 28 | for element in unsup_img_name_list: 29 | textfile_unsup.write(element + "\n") 30 | textfile_unsup.close() 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /data/semi_data.py: -------------------------------------------------------------------------------- 1 | from data.transform import crop, hflip, normalize, resize, blur, cutout 2 | 3 | import numpy as np 4 | import math 5 | import os 6 | from PIL import Image 7 | import random 8 | from torch.utils.data import Dataset 9 | from torchvision import transforms 10 | import torch 11 | import cv2 12 | 13 | 14 | def get_voc_pallete(num_classes): 15 | n = num_classes 16 | pallete = [0]*(n*3) 17 | for j in range(0,n): 18 | lab = j 19 | pallete[j*3+0] = 0 20 | pallete[j*3+1] = 0 21 | pallete[j*3+2] = 0 22 | i = 0 23 | while (lab > 0): 24 | pallete[j*3+0] |= (((lab >> 0) & 1) << (7-i)) 25 | pallete[j*3+1] |= (((lab >> 1) & 1) << (7-i)) 26 | pallete[j*3+2] |= (((lab >> 2) & 1) << (7-i)) 27 | i = i + 1 28 | lab >>= 3 29 | return pallete 30 | 31 | 32 | def get_normalized_vector(d): 33 | d /= 1e-12 + torch.max(torch.abs(d)) 34 | return d / torch.sqrt((1e-6 + torch.sum(torch.pow(d, 2.0)))) 35 | 36 | 37 | def generate_perturbation(x): 38 | d = torch.normal(torch.zeros(x.size()), torch.ones(x.size())) 39 | d = get_normalized_vector(d) 40 | d.requires_grad = False 41 | return 20 * get_normalized_vector(d) 42 | 43 | 44 | class SemiDataset(Dataset): 45 | def __init__(self, root, mode, label_percent): 46 | self.MEAN = [0.485, 0.456, 0.406] 47 | self.STD = [0.229, 0.224, 0.225] 48 | self.palette = get_voc_pallete(2) 49 | self.mode = mode 50 | self.root = root 51 | self.percnt_lbl = label_percent 52 | 53 | self.base_size = 256 54 | self.crop_size = 256 55 | self.flip = True 56 | self.scale = True 57 | self.image_padding = (np.array(self.MEAN) * 255.).tolist() 58 | 59 | # self.jitter_tf = transforms.ColorJitter(brightness=0.1, contrast=0.1, saturation=0.1, hue=0.1) 60 | self.jitter_tf_s = transforms.ColorJitter(brightness=0.3, contrast=0.3, saturation=0.3, hue=0.3) 61 | self.to_tensor = transforms.ToTensor() 62 | self.normalize = transforms.Normalize(self.MEAN, self.STD) 63 | 64 | cv2.setNumThreads(0) 65 | 66 | if mode == 'semi': 67 | labeled_id_path = os.path.join(self.root, 'list', f"{self.percnt_lbl}_{'train_supervised'}" + ".txt") 68 | unlabeled_id_path = os.path.join(self.root, 'list', f"{self.percnt_lbl}_{'reliable_ids'}" + ".txt") 69 | unre_id_path = os.path.join(self.root, 'list', f"{self.percnt_lbl}_{'unreliable_ids'}" + ".txt") 70 | # unlabeled_id_path = os.path.join(self.root, 'list', f"{self.percnt_lbl}_{'train_unsupervised'}" + ".txt") 71 | with open(labeled_id_path, 'r') as f: 72 | self.labeled_ids = f.read().splitlines() 73 | with open(unlabeled_id_path, 'r') as f: 74 | self.unlabeled_ids = f.read().splitlines() 75 | with open(unre_id_path, 'r') as f: 76 | self.unrel_ids = f.read().splitlines() 77 | self.ids = \ 78 | self.labeled_ids * math.ceil(len(self.unlabeled_ids) / len(self.labeled_ids)) \ 79 | + self.unlabeled_ids + self.unrel_ids 80 | # self.ids = \ 81 | # self.labeled_ids * math.ceil(len(self.unlabeled_ids) / len(self.labeled_ids)) \ 82 | # + self.unlabeled_ids + self.unrel_ids 83 | else: 84 | if self.mode == "val": 85 | id_path = os.path.join(self.root, 'list', f"{self.mode}" + ".txt") 86 | elif self.mode == "test": 87 | id_path = os.path.join(self.root, 'list', f"{self.mode}" + ".txt") 88 | elif self.mode == "select": 89 | id_path = os.path.join(self.root, 'list', f"{self.percnt_lbl}_{'train_unsupervised'}" + ".txt") 90 | elif self.mode == "label": 91 | id_path = os.path.join(self.root, 'list', f"{self.percnt_lbl}_{'reliable_ids'}" + ".txt") 92 | # id_path = os.path.join(self.root, 'list', f"{self.percnt_lbl}_{'train_unsupervised'}" + ".txt") 93 | elif self.mode == "train": 94 | id_path = os.path.join(self.root, 'list', f"{self.percnt_lbl}_{'train_supervised'}" + ".txt") 95 | with open(id_path, 'r') as f: 96 | self.ids = f.read().splitlines() 97 | 98 | def _resize(self, image_A, image_B, label, bigger_side_to_base_size=True): 99 | if isinstance(self.base_size, int): 100 | h, w, _ = image_A.shape 101 | if self.scale: 102 | longside = random.randint(int(self.base_size*0.5), int(self.base_size*2.0)) 103 | #longside = random.randint(int(self.base_size*0.5), int(self.base_size*1)) 104 | else: 105 | longside = self.base_size 106 | 107 | if bigger_side_to_base_size: 108 | h, w = (longside, int(1.0 * longside * w / h + 0.5)) if h > w else (int(1.0 * longside * h / w + 0.5), longside) 109 | else: 110 | h, w = (longside, int(1.0 * longside * w / h + 0.5)) if h < w else (int(1.0 * longside * h / w + 0.5), longside) 111 | image_A = np.asarray(Image.fromarray(np.uint8(image_A)).resize((w, h), Image.BICUBIC)) 112 | image_B = np.asarray(Image.fromarray(np.uint8(image_B)).resize((w, h), Image.BICUBIC)) 113 | label = cv2.resize(label, (w, h), interpolation=cv2.INTER_NEAREST) 114 | return image_A, image_B, label 115 | 116 | elif (isinstance(self.base_size, list) or isinstance(self.base_size, tuple)) and len(self.base_size) == 2: 117 | h, w, _ = image_A.shape 118 | if self.scale: 119 | scale = random.random() * 1.5 + 0.5 # Scaling between [0.5, 2] 120 | h, w = int(self.base_size[0] * scale), int(self.base_size[1] * scale) 121 | else: 122 | h, w = self.base_size 123 | image_A = np.asarray(Image.fromarray(np.uint8(image_A)).resize((w, h), Image.BICUBIC)) 124 | image_B = np.asarray(Image.fromarray(np.uint8(image_B)).resize((w, h), Image.BICUBIC)) 125 | label = cv2.resize(label, (w, h), interpolation=cv2.INTER_NEAREST) 126 | return image_A, image_B, label 127 | 128 | else: 129 | raise ValueError 130 | 131 | def _crop(self, image_A, image_B, label): 132 | # Padding to return the correct crop size 133 | if (isinstance(self.crop_size, list) or isinstance(self.crop_size, tuple)) and len(self.crop_size) == 2: 134 | crop_h, crop_w = self.crop_size 135 | elif isinstance(self.crop_size, int): 136 | crop_h, crop_w = self.crop_size, self.crop_size 137 | else: 138 | raise ValueError 139 | 140 | h, w, _ = image_A.shape 141 | pad_h = max(crop_h - h, 0) 142 | pad_w = max(crop_w - w, 0) 143 | pad_kwargs = { 144 | "top": 0, 145 | "bottom": pad_h, 146 | "left": 0, 147 | "right": pad_w, 148 | "borderType": cv2.BORDER_CONSTANT,} 149 | if pad_h > 0 or pad_w > 0: 150 | image_A = cv2.copyMakeBorder(image_A, value=self.image_padding, **pad_kwargs) 151 | image_B = cv2.copyMakeBorder(image_B, value=self.image_padding, **pad_kwargs) 152 | label = cv2.copyMakeBorder(label, value=0, **pad_kwargs) # use 0 for padding 153 | 154 | # Cropping 155 | h, w, _ = image_A.shape 156 | start_h = random.randint(0, h - crop_h) 157 | start_w = random.randint(0, w - crop_w) 158 | end_h = start_h + crop_h 159 | end_w = start_w + crop_w 160 | image_A = image_A[start_h:end_h, start_w:end_w] 161 | image_B = image_B[start_h:end_h, start_w:end_w] 162 | label = label[start_h:end_h, start_w:end_w] 163 | return image_A, image_B, label 164 | 165 | def _flip(self, image_A, image_B, label): 166 | # Random H flip 167 | if random.random() > 0.5: 168 | image_A = np.fliplr(image_A).copy() 169 | image_B = np.fliplr(image_B).copy() 170 | label = np.fliplr(label).copy() 171 | return image_A, image_B, label 172 | 173 | def __getitem__(self, item): 174 | image_id = self.ids[item] 175 | if self.mode == 'semi' and image_id in self.unrel_ids: 176 | if random.random() < 0.5: 177 | image_A_path = os.path.join(self.root, 'A', image_id) 178 | image_B_path = os.path.join(self.root, 'A', image_id) 179 | else: 180 | image_A_path = os.path.join(self.root, 'B', image_id) 181 | image_B_path = os.path.join(self.root, 'B', image_id) 182 | else: 183 | image_A_path = os.path.join(self.root, 'A', image_id) 184 | image_B_path = os.path.join(self.root, 'B', image_id) 185 | image_A = np.asarray(Image.open(image_A_path), dtype=np.float32) 186 | image_B = np.asarray(Image.open(image_B_path), dtype=np.float32) 187 | 188 | if self.mode == 'val' or self.mode == 'label' or self.mode == 'test' or self.mode == 'select': 189 | image_A = self.normalize(self.to_tensor(Image.fromarray(np.uint8(image_A)))) 190 | image_B = self.normalize(self.to_tensor(Image.fromarray(np.uint8(image_B)))) 191 | label_path = os.path.join(self.root, 'label', image_id) 192 | label = np.asarray(Image.open(label_path), dtype=np.int32) 193 | if label.ndim == 3: 194 | label = label[:, :, 0] 195 | 196 | label[label >= 1] = 1 197 | label = torch.from_numpy(np.array(label, dtype=np.int32)).long() 198 | return image_A, image_B, label, image_id 199 | 200 | if self.mode == 'train' or (self.mode == 'semi' and image_id in self.labeled_ids): 201 | label = np.asarray(Image.open(os.path.join(self.root, 'label', image_id)), dtype=np.int32) 202 | elif self.mode == 'semi' and image_id in self.unrel_ids: 203 | label = np.asarray(Image.open(os.path.join(self.root, 'label', '100.png')), dtype=np.int32) 204 | else: 205 | # mode == 'semi' and the id corresponds to unlabeled image 206 | # label = np.asarray(Image.open(os.path.join(self.root, 'ux_label', image_id)), dtype=np.int32) 207 | label = np.asarray(Image.open(os.path.join(self.root, f"{'pseudo_label'}_{self.percnt_lbl}", image_id)), dtype=np.int32) 208 | 209 | # basic augmentation on all training images 210 | h, w, _ = image_A.shape 211 | 212 | if self.base_size is not None: 213 | image_A, image_B, label = self._resize(image_A, image_B, label) 214 | 215 | if self.crop_size is not None: 216 | image_A, image_B, label = self._crop(image_A, image_B, label) 217 | 218 | if self.flip: 219 | image_A, image_B, label = self._flip(image_A, image_B, label) 220 | 221 | image_A = Image.fromarray(np.uint8(image_A)) 222 | image_B = Image.fromarray(np.uint8(image_B)) 223 | 224 | # strong augmentation on unlabeled images 225 | if self.mode == 'semi' and image_id in self.unlabeled_ids: 226 | if random.random() < 0.8: 227 | image_A = transforms.ColorJitter(0.5, 0.5, 0.5, 0.25)(image_A) 228 | image_B = transforms.ColorJitter(0.5, 0.5, 0.5, 0.25)(image_B) 229 | image_A = transforms.RandomGrayscale(p=0.2)(image_A) 230 | image_B = transforms.RandomGrayscale(p=0.2)(image_B) 231 | image_A = blur(image_A, p=0.5) 232 | image_B = blur(image_B, p=0.5) 233 | image_A, image_B, label = cutout(image_A, image_B, label, p=0.5) 234 | 235 | image_A = self.normalize(self.to_tensor(image_A)) 236 | image_B = self.normalize(self.to_tensor(image_B)) 237 | 238 | if label.ndim == 3: 239 | label = label[:, :, 0] 240 | label[label >= 1] = 1 241 | label = torch.from_numpy(np.array(label, dtype=np.int32)).long() 242 | return image_A, image_B, label, image_id 243 | 244 | def __len__(self): 245 | return len(self.ids) 246 | -------------------------------------------------------------------------------- /data/test.txt: -------------------------------------------------------------------------------- 1 | test_7_1.png 2 | test_7_2.png 3 | test_7_3.png 4 | test_7_4.png 5 | test_7_5.png 6 | test_7_6.png 7 | test_7_7.png 8 | test_7_8.png 9 | test_7_9.png 10 | test_7_10.png 11 | test_7_11.png 12 | test_7_12.png 13 | test_7_13.png 14 | test_7_14.png 15 | test_7_15.png 16 | test_7_16.png 17 | -------------------------------------------------------------------------------- /data/transform.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from PIL import Image, ImageOps, ImageFilter 3 | import random 4 | import torch 5 | from torchvision import transforms 6 | 7 | 8 | def crop(img, mask, size): 9 | # padding height or width if smaller than cropping size 10 | w, h = img.size 11 | padw = size - w if w < size else 0 12 | padh = size - h if h < size else 0 13 | img = ImageOps.expand(img, border=(0, 0, padw, padh), fill=0) 14 | mask = ImageOps.expand(mask, border=(0, 0, padw, padh), fill=255) 15 | 16 | # cropping 17 | w, h = img.size 18 | x = random.randint(0, w - size) 19 | y = random.randint(0, h - size) 20 | img = img.crop((x, y, x + size, y + size)) 21 | mask = mask.crop((x, y, x + size, y + size)) 22 | 23 | return img, mask 24 | 25 | 26 | def hflip(img, mask, p=0.5): 27 | if random.random() < p: 28 | img = img.transpose(Image.FLIP_LEFT_RIGHT) 29 | mask = mask.transpose(Image.FLIP_LEFT_RIGHT) 30 | return img, mask 31 | 32 | 33 | def normalize(img, mask=None): 34 | """ 35 | :param img: PIL image 36 | :param mask: PIL image, corresponding mask 37 | :return: normalized torch tensor of image and mask 38 | """ 39 | img = transforms.Compose([ 40 | transforms.ToTensor(), 41 | transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), 42 | ])(img) 43 | if mask is not None: 44 | mask = torch.from_numpy(np.array(mask)).long() 45 | return img, mask 46 | return img 47 | 48 | 49 | def resize(img, mask, base_size, ratio_range): 50 | w, h = img.size 51 | long_side = random.randint(int(base_size * ratio_range[0]), int(base_size * ratio_range[1])) 52 | 53 | if h > w: 54 | oh = long_side 55 | ow = int(1.0 * w * long_side / h + 0.5) 56 | else: 57 | ow = long_side 58 | oh = int(1.0 * h * long_side / w + 0.5) 59 | 60 | img = img.resize((ow, oh), Image.BILINEAR) 61 | mask = mask.resize((ow, oh), Image.NEAREST) 62 | return img, mask 63 | 64 | 65 | def blur(img, p=0.5): 66 | if random.random() < p: 67 | sigma = np.random.uniform(0.1, 2.0) 68 | img = img.filter(ImageFilter.GaussianBlur(radius=sigma)) 69 | return img 70 | 71 | 72 | def cutout(img_A, img_B, mask, p=0.5, size_min=0.02, size_max=0.4, ratio_1=0.3, 73 | ratio_2=1/0.3, value_min=0, value_max=255, pixel_level=True): 74 | if random.random() < p: 75 | img_A = np.array(img_A) 76 | img_B = np.array(img_B) 77 | mask = np.array(mask) 78 | 79 | img_h, img_w, img_c = img_A.shape 80 | 81 | while True: 82 | size = np.random.uniform(size_min, size_max) * img_h * img_w 83 | ratio = np.random.uniform(ratio_1, ratio_2) 84 | erase_w = int(np.sqrt(size / ratio)) 85 | erase_h = int(np.sqrt(size * ratio)) 86 | x = np.random.randint(0, img_w) 87 | y = np.random.randint(0, img_h) 88 | 89 | if x + erase_w <= img_w and y + erase_h <= img_h: 90 | break 91 | 92 | if pixel_level: 93 | value = np.random.uniform(value_min, value_max, (erase_h, erase_w, img_c)) 94 | else: 95 | value = np.random.uniform(value_min, value_max) 96 | 97 | img_A[y:y + erase_h, x:x + erase_w] = value 98 | img_B[y:y + erase_h, x:x + erase_w] = value 99 | mask[y:y + erase_h, x:x + erase_w] = 255 100 | 101 | img_A = Image.fromarray(img_A.astype(np.uint8)) 102 | img_B = Image.fromarray(img_B.astype(np.uint8)) 103 | # mask = Image.fromarray(mask.astype(np.uint8)) 104 | 105 | return img_A, img_B, mask 106 | -------------------------------------------------------------------------------- /data/val.txt: -------------------------------------------------------------------------------- 1 | val_10_10.png 2 | val_10_11.png 3 | val_10_12.png 4 | val_10_13.png 5 | val_10_14.png 6 | val_10_15.png 7 | val_10_16.png 8 | val_10_1.png 9 | val_10_2.png 10 | val_10_3.png 11 | val_10_4.png 12 | val_10_5.png 13 | val_10_6.png 14 | val_10_7.png 15 | val_10_8.png 16 | val_10_9.png 17 | val_1_10.png 18 | val_11_10.png 19 | val_11_11.png 20 | val_11_12.png 21 | val_11_13.png 22 | val_11_14.png 23 | val_11_15.png 24 | val_11_16.png 25 | val_1_11.png 26 | val_11_1.png 27 | val_1_12.png 28 | val_11_2.png 29 | val_1_13.png 30 | val_11_3.png 31 | val_1_14.png 32 | val_11_4.png 33 | val_1_15.png 34 | val_11_5.png 35 | val_1_16.png 36 | val_11_6.png 37 | val_11_7.png 38 | val_11_8.png 39 | val_11_9.png 40 | val_1_1.png 41 | val_12_10.png 42 | val_12_11.png 43 | val_12_12.png 44 | val_12_13.png 45 | val_12_14.png 46 | val_12_15.png 47 | val_12_16.png 48 | val_12_1.png 49 | val_12_2.png 50 | val_12_3.png 51 | val_12_4.png 52 | val_12_5.png 53 | val_12_6.png 54 | val_12_7.png 55 | val_12_8.png 56 | val_12_9.png 57 | val_1_2.png 58 | val_13_10.png 59 | val_13_11.png 60 | val_13_12.png 61 | val_13_13.png 62 | val_13_14.png 63 | val_13_15.png 64 | val_13_16.png 65 | val_13_1.png 66 | val_13_2.png 67 | val_13_3.png 68 | val_13_4.png 69 | val_13_5.png 70 | val_13_6.png 71 | val_13_7.png 72 | val_13_8.png 73 | val_13_9.png 74 | val_1_3.png 75 | val_14_10.png 76 | val_14_11.png 77 | val_14_12.png 78 | val_14_13.png 79 | val_14_14.png 80 | val_14_15.png 81 | val_14_16.png 82 | val_14_1.png 83 | val_14_2.png 84 | val_14_3.png 85 | val_14_4.png 86 | val_14_5.png 87 | val_14_6.png 88 | val_14_7.png 89 | val_14_8.png 90 | val_14_9.png 91 | val_1_4.png 92 | val_15_10.png 93 | val_15_11.png 94 | val_15_12.png 95 | val_15_13.png 96 | val_15_14.png 97 | val_15_15.png 98 | val_15_16.png 99 | val_15_1.png 100 | val_15_2.png 101 | val_15_3.png 102 | val_15_4.png 103 | val_15_5.png 104 | val_15_6.png 105 | val_15_7.png 106 | val_15_8.png 107 | val_15_9.png 108 | val_1_5.png 109 | val_16_10.png 110 | val_16_11.png 111 | val_16_12.png 112 | val_16_13.png 113 | val_16_14.png 114 | val_16_15.png 115 | val_16_16.png 116 | val_16_1.png 117 | val_16_2.png 118 | val_16_3.png 119 | val_16_4.png 120 | val_16_5.png 121 | val_16_6.png 122 | val_16_7.png 123 | val_16_8.png 124 | val_16_9.png 125 | val_1_6.png 126 | val_17_10.png 127 | val_17_11.png 128 | val_17_12.png 129 | val_17_13.png 130 | val_17_14.png 131 | val_17_15.png 132 | val_17_16.png 133 | val_17_1.png 134 | val_17_2.png 135 | val_17_3.png 136 | val_17_4.png 137 | val_17_5.png 138 | val_17_6.png 139 | val_17_7.png 140 | val_17_8.png 141 | val_17_9.png 142 | val_1_7.png 143 | val_18_10.png 144 | val_18_11.png 145 | val_18_12.png 146 | val_18_13.png 147 | val_18_14.png 148 | val_18_15.png 149 | val_18_16.png 150 | val_18_1.png 151 | val_18_2.png 152 | val_18_3.png 153 | val_18_4.png 154 | val_18_5.png 155 | val_18_6.png 156 | val_18_7.png 157 | val_18_8.png 158 | val_18_9.png 159 | val_1_8.png 160 | val_19_10.png 161 | val_19_11.png 162 | val_19_12.png 163 | val_19_13.png 164 | val_19_14.png 165 | val_19_15.png 166 | val_19_16.png 167 | val_19_1.png 168 | val_19_2.png 169 | val_19_3.png 170 | val_19_4.png 171 | val_19_5.png 172 | val_19_6.png 173 | val_19_7.png 174 | val_19_8.png 175 | val_19_9.png 176 | val_1_9.png 177 | val_20_10.png 178 | val_20_11.png 179 | val_20_12.png 180 | val_20_13.png 181 | val_20_14.png 182 | val_20_15.png 183 | val_20_16.png 184 | val_20_1.png 185 | val_20_2.png 186 | val_20_3.png 187 | val_20_4.png 188 | val_20_5.png 189 | val_20_6.png 190 | val_20_7.png 191 | val_20_8.png 192 | val_20_9.png 193 | val_2_10.png 194 | val_21_10.png 195 | val_21_11.png 196 | val_21_12.png 197 | val_21_13.png 198 | val_21_14.png 199 | val_21_15.png 200 | val_21_16.png 201 | val_2_11.png 202 | val_21_1.png 203 | val_2_12.png 204 | val_21_2.png 205 | val_2_13.png 206 | val_21_3.png 207 | val_2_14.png 208 | val_21_4.png 209 | val_2_15.png 210 | val_21_5.png 211 | val_2_16.png 212 | val_21_6.png 213 | val_21_7.png 214 | val_21_8.png 215 | val_21_9.png 216 | val_2_1.png 217 | val_22_10.png 218 | val_22_11.png 219 | val_22_12.png 220 | val_22_13.png 221 | val_22_14.png 222 | val_22_15.png 223 | val_22_16.png 224 | val_22_1.png 225 | val_22_2.png 226 | val_22_3.png 227 | val_22_4.png 228 | val_22_5.png 229 | val_22_6.png 230 | val_22_7.png 231 | val_22_8.png 232 | val_22_9.png 233 | val_2_2.png 234 | val_23_10.png 235 | val_23_11.png 236 | val_23_12.png 237 | val_23_13.png 238 | val_23_14.png 239 | val_23_15.png 240 | val_23_16.png 241 | val_23_1.png 242 | val_23_2.png 243 | val_23_3.png 244 | val_23_4.png 245 | val_23_5.png 246 | val_23_6.png 247 | val_23_7.png 248 | val_23_8.png 249 | val_23_9.png 250 | val_2_3.png 251 | val_24_10.png 252 | val_24_11.png 253 | val_24_12.png 254 | val_24_13.png 255 | val_24_14.png 256 | val_24_15.png 257 | val_24_16.png 258 | val_24_1.png 259 | val_24_2.png 260 | val_24_3.png 261 | val_24_4.png 262 | val_24_5.png 263 | val_24_6.png 264 | val_24_7.png 265 | val_24_8.png 266 | val_24_9.png 267 | val_2_4.png 268 | val_25_10.png 269 | val_25_11.png 270 | val_25_12.png 271 | val_25_13.png 272 | val_25_14.png 273 | val_25_15.png 274 | val_25_16.png 275 | val_25_1.png 276 | val_25_2.png 277 | val_25_3.png 278 | val_25_4.png 279 | val_25_5.png 280 | val_25_6.png 281 | val_25_7.png 282 | val_25_8.png 283 | val_25_9.png 284 | val_2_5.png 285 | val_26_10.png 286 | val_26_11.png 287 | val_26_12.png 288 | val_26_13.png 289 | val_26_14.png 290 | val_26_15.png 291 | val_26_16.png 292 | val_26_1.png 293 | val_26_2.png 294 | val_26_3.png 295 | val_26_4.png 296 | val_26_5.png 297 | val_26_6.png 298 | val_26_7.png 299 | val_26_8.png 300 | val_26_9.png 301 | val_2_6.png 302 | val_27_10.png 303 | val_27_11.png 304 | val_27_12.png 305 | val_27_13.png 306 | val_27_14.png 307 | val_27_15.png 308 | val_27_16.png 309 | val_27_1.png 310 | val_27_2.png 311 | val_27_3.png 312 | val_27_4.png 313 | val_27_5.png 314 | val_27_6.png 315 | val_27_7.png 316 | val_27_8.png 317 | val_27_9.png 318 | val_2_7.png 319 | val_28_10.png 320 | val_28_11.png 321 | val_28_12.png 322 | val_28_13.png 323 | val_28_14.png 324 | val_28_15.png 325 | val_28_16.png 326 | val_28_1.png 327 | val_28_2.png 328 | val_28_3.png 329 | val_28_4.png 330 | val_28_5.png 331 | val_28_6.png 332 | val_28_7.png 333 | val_28_8.png 334 | val_28_9.png 335 | val_2_8.png 336 | val_29_10.png 337 | val_29_11.png 338 | val_29_12.png 339 | val_29_13.png 340 | val_29_14.png 341 | val_29_15.png 342 | val_29_16.png 343 | val_29_1.png 344 | val_29_2.png 345 | val_29_3.png 346 | val_29_4.png 347 | val_29_5.png 348 | val_29_6.png 349 | val_29_7.png 350 | val_29_8.png 351 | val_29_9.png 352 | val_2_9.png 353 | val_30_10.png 354 | val_30_11.png 355 | val_30_12.png 356 | val_30_13.png 357 | val_30_14.png 358 | val_30_15.png 359 | val_30_16.png 360 | val_30_1.png 361 | val_30_2.png 362 | val_30_3.png 363 | val_30_4.png 364 | val_30_5.png 365 | val_30_6.png 366 | val_30_7.png 367 | val_30_8.png 368 | val_30_9.png 369 | val_3_10.png 370 | val_31_10.png 371 | val_31_11.png 372 | val_31_12.png 373 | val_31_13.png 374 | val_31_14.png 375 | val_31_15.png 376 | val_31_16.png 377 | val_3_11.png 378 | val_31_1.png 379 | val_3_12.png 380 | val_31_2.png 381 | val_3_13.png 382 | val_31_3.png 383 | val_3_14.png 384 | val_31_4.png 385 | val_3_15.png 386 | val_31_5.png 387 | val_3_16.png 388 | val_31_6.png 389 | val_31_7.png 390 | val_31_8.png 391 | val_31_9.png 392 | val_3_1.png 393 | val_32_10.png 394 | val_32_11.png 395 | val_32_12.png 396 | val_32_13.png 397 | val_32_14.png 398 | val_32_15.png 399 | val_32_16.png 400 | val_32_1.png 401 | val_32_2.png 402 | val_32_3.png 403 | val_32_4.png 404 | val_32_5.png 405 | val_32_6.png 406 | val_32_7.png 407 | val_32_8.png 408 | val_32_9.png 409 | val_3_2.png 410 | val_33_10.png 411 | val_33_11.png 412 | val_33_12.png 413 | val_33_13.png 414 | val_33_14.png 415 | val_33_15.png 416 | val_33_16.png 417 | val_33_1.png 418 | val_33_2.png 419 | val_33_3.png 420 | val_33_4.png 421 | val_33_5.png 422 | val_33_6.png 423 | val_33_7.png 424 | val_33_8.png 425 | val_33_9.png 426 | val_3_3.png 427 | val_34_10.png 428 | val_34_11.png 429 | val_34_12.png 430 | val_34_13.png 431 | val_34_14.png 432 | val_34_15.png 433 | val_34_16.png 434 | val_34_1.png 435 | val_34_2.png 436 | val_34_3.png 437 | val_34_4.png 438 | val_34_5.png 439 | val_34_6.png 440 | val_34_7.png 441 | val_34_8.png 442 | val_34_9.png 443 | val_3_4.png 444 | val_35_10.png 445 | val_35_11.png 446 | val_35_12.png 447 | val_35_13.png 448 | val_35_14.png 449 | val_35_15.png 450 | val_35_16.png 451 | val_35_1.png 452 | val_35_2.png 453 | val_35_3.png 454 | val_35_4.png 455 | val_35_5.png 456 | val_35_6.png 457 | val_35_7.png 458 | val_35_8.png 459 | val_35_9.png 460 | val_3_5.png 461 | val_36_10.png 462 | val_36_11.png 463 | val_36_12.png 464 | val_36_13.png 465 | val_36_14.png 466 | val_36_15.png 467 | val_36_16.png 468 | val_36_1.png 469 | val_36_2.png 470 | val_36_3.png 471 | val_36_4.png 472 | val_36_5.png 473 | val_36_6.png 474 | val_36_7.png 475 | val_36_8.png 476 | val_36_9.png 477 | val_3_6.png 478 | val_37_10.png 479 | val_37_11.png 480 | val_37_12.png 481 | val_37_13.png 482 | val_37_14.png 483 | val_37_15.png 484 | val_37_16.png 485 | val_37_1.png 486 | val_37_2.png 487 | val_37_3.png 488 | val_37_4.png 489 | val_37_5.png 490 | val_37_6.png 491 | val_37_7.png 492 | val_37_8.png 493 | val_37_9.png 494 | val_3_7.png 495 | val_38_10.png 496 | val_38_11.png 497 | val_38_12.png 498 | val_38_13.png 499 | val_38_14.png 500 | val_38_15.png 501 | val_38_16.png 502 | val_38_1.png 503 | val_38_2.png 504 | val_38_3.png 505 | val_38_4.png 506 | val_38_5.png 507 | val_38_6.png 508 | val_38_7.png 509 | val_38_8.png 510 | val_38_9.png 511 | val_3_8.png 512 | val_39_10.png 513 | val_39_11.png 514 | val_39_12.png 515 | val_39_13.png 516 | val_39_14.png 517 | val_39_15.png 518 | val_39_16.png 519 | val_39_1.png 520 | val_39_2.png 521 | val_39_3.png 522 | val_39_4.png 523 | val_39_5.png 524 | val_39_6.png 525 | val_39_7.png 526 | val_39_8.png 527 | val_39_9.png 528 | val_3_9.png 529 | val_40_10.png 530 | val_40_11.png 531 | val_40_12.png 532 | val_40_13.png 533 | val_40_14.png 534 | val_40_15.png 535 | val_40_16.png 536 | val_40_1.png 537 | val_40_2.png 538 | val_40_3.png 539 | val_40_4.png 540 | val_40_5.png 541 | val_40_6.png 542 | val_40_7.png 543 | val_40_8.png 544 | val_40_9.png 545 | val_4_10.png 546 | val_41_10.png 547 | val_41_11.png 548 | val_41_12.png 549 | val_41_13.png 550 | val_41_14.png 551 | val_41_15.png 552 | val_41_16.png 553 | val_4_11.png 554 | val_41_1.png 555 | val_4_12.png 556 | val_41_2.png 557 | val_4_13.png 558 | val_41_3.png 559 | val_4_14.png 560 | val_41_4.png 561 | val_4_15.png 562 | val_41_5.png 563 | val_4_16.png 564 | val_41_6.png 565 | val_41_7.png 566 | val_41_8.png 567 | val_41_9.png 568 | val_4_1.png 569 | val_42_10.png 570 | val_42_11.png 571 | val_42_12.png 572 | val_42_13.png 573 | val_42_14.png 574 | val_42_15.png 575 | val_42_16.png 576 | val_42_1.png 577 | val_42_2.png 578 | val_42_3.png 579 | val_42_4.png 580 | val_42_5.png 581 | val_42_6.png 582 | val_42_7.png 583 | val_42_8.png 584 | val_42_9.png 585 | val_4_2.png 586 | val_43_10.png 587 | val_43_11.png 588 | val_43_12.png 589 | val_43_13.png 590 | val_43_14.png 591 | val_43_15.png 592 | val_43_16.png 593 | val_43_1.png 594 | val_43_2.png 595 | val_43_3.png 596 | val_43_4.png 597 | val_43_5.png 598 | val_43_6.png 599 | val_43_7.png 600 | val_43_8.png 601 | val_43_9.png 602 | val_4_3.png 603 | val_44_10.png 604 | val_44_11.png 605 | val_44_12.png 606 | val_44_13.png 607 | val_44_14.png 608 | val_44_15.png 609 | val_44_16.png 610 | val_44_1.png 611 | val_44_2.png 612 | val_44_3.png 613 | val_44_4.png 614 | val_44_5.png 615 | val_44_6.png 616 | val_44_7.png 617 | val_44_8.png 618 | val_44_9.png 619 | val_4_4.png 620 | val_45_10.png 621 | val_45_11.png 622 | val_45_12.png 623 | val_45_13.png 624 | val_45_14.png 625 | val_45_15.png 626 | val_45_16.png 627 | val_45_1.png 628 | val_45_2.png 629 | val_45_3.png 630 | val_45_4.png 631 | val_45_5.png 632 | val_45_6.png 633 | val_45_7.png 634 | val_45_8.png 635 | val_45_9.png 636 | val_4_5.png 637 | val_46_10.png 638 | val_46_11.png 639 | val_46_12.png 640 | val_46_13.png 641 | val_46_14.png 642 | val_46_15.png 643 | val_46_16.png 644 | val_46_1.png 645 | val_46_2.png 646 | val_46_3.png 647 | val_46_4.png 648 | val_46_5.png 649 | val_46_6.png 650 | val_46_7.png 651 | val_46_8.png 652 | val_46_9.png 653 | val_4_6.png 654 | val_47_10.png 655 | val_47_11.png 656 | val_47_12.png 657 | val_47_13.png 658 | val_47_14.png 659 | val_47_15.png 660 | val_47_16.png 661 | val_47_1.png 662 | val_47_2.png 663 | val_47_3.png 664 | val_47_4.png 665 | val_47_5.png 666 | val_47_6.png 667 | val_47_7.png 668 | val_47_8.png 669 | val_47_9.png 670 | val_4_7.png 671 | val_48_10.png 672 | val_48_11.png 673 | val_48_12.png 674 | val_48_13.png 675 | val_48_14.png 676 | val_48_15.png 677 | val_48_16.png 678 | val_48_1.png 679 | val_48_2.png 680 | val_48_3.png 681 | val_48_4.png 682 | val_48_5.png 683 | val_48_6.png 684 | val_48_7.png 685 | val_48_8.png 686 | val_48_9.png 687 | val_4_8.png 688 | val_49_10.png 689 | val_49_11.png 690 | val_49_12.png 691 | val_49_13.png 692 | val_49_14.png 693 | val_49_15.png 694 | val_49_16.png 695 | val_49_1.png 696 | val_49_2.png 697 | val_49_3.png 698 | val_49_4.png 699 | val_49_5.png 700 | val_49_6.png 701 | val_49_7.png 702 | val_49_8.png 703 | val_49_9.png 704 | val_4_9.png 705 | val_50_10.png 706 | val_50_11.png 707 | val_50_12.png 708 | val_50_13.png 709 | val_50_14.png 710 | val_50_15.png 711 | val_50_16.png 712 | val_50_1.png 713 | val_50_2.png 714 | val_50_3.png 715 | val_50_4.png 716 | val_50_5.png 717 | val_50_6.png 718 | val_50_7.png 719 | val_50_8.png 720 | val_50_9.png 721 | val_5_10.png 722 | val_51_10.png 723 | val_51_11.png 724 | val_51_12.png 725 | val_51_13.png 726 | val_51_14.png 727 | val_51_15.png 728 | val_51_16.png 729 | val_5_11.png 730 | val_51_1.png 731 | val_5_12.png 732 | val_51_2.png 733 | val_5_13.png 734 | val_51_3.png 735 | val_5_14.png 736 | val_51_4.png 737 | val_5_15.png 738 | val_51_5.png 739 | val_5_16.png 740 | val_51_6.png 741 | val_51_7.png 742 | val_51_8.png 743 | val_51_9.png 744 | val_5_1.png 745 | val_52_10.png 746 | val_52_11.png 747 | val_52_12.png 748 | val_52_13.png 749 | val_52_14.png 750 | val_52_15.png 751 | val_52_16.png 752 | val_52_1.png 753 | val_52_2.png 754 | val_52_3.png 755 | val_52_4.png 756 | val_52_5.png 757 | val_52_6.png 758 | val_52_7.png 759 | val_52_8.png 760 | val_52_9.png 761 | val_5_2.png 762 | val_53_10.png 763 | val_53_11.png 764 | val_53_12.png 765 | val_53_13.png 766 | val_53_14.png 767 | val_53_15.png 768 | val_53_16.png 769 | val_53_1.png 770 | val_53_2.png 771 | val_53_3.png 772 | val_53_4.png 773 | val_53_5.png 774 | val_53_6.png 775 | val_53_7.png 776 | val_53_8.png 777 | val_53_9.png 778 | val_5_3.png 779 | val_54_10.png 780 | val_54_11.png 781 | val_54_12.png 782 | val_54_13.png 783 | val_54_14.png 784 | val_54_15.png 785 | val_54_16.png 786 | val_54_1.png 787 | val_54_2.png 788 | val_54_3.png 789 | val_54_4.png 790 | val_54_5.png 791 | val_54_6.png 792 | val_54_7.png 793 | val_54_8.png 794 | val_54_9.png 795 | val_5_4.png 796 | val_55_10.png 797 | val_55_11.png 798 | val_55_12.png 799 | val_55_13.png 800 | val_55_14.png 801 | val_55_15.png 802 | val_55_16.png 803 | val_55_1.png 804 | val_55_2.png 805 | val_55_3.png 806 | val_55_4.png 807 | val_55_5.png 808 | val_55_6.png 809 | val_55_7.png 810 | val_55_8.png 811 | val_55_9.png 812 | val_5_5.png 813 | val_56_10.png 814 | val_56_11.png 815 | val_56_12.png 816 | val_56_13.png 817 | val_56_14.png 818 | val_56_15.png 819 | val_56_16.png 820 | val_56_1.png 821 | val_56_2.png 822 | val_56_3.png 823 | val_56_4.png 824 | val_56_5.png 825 | val_56_6.png 826 | val_56_7.png 827 | val_56_8.png 828 | val_56_9.png 829 | val_5_6.png 830 | val_57_10.png 831 | val_57_11.png 832 | val_57_12.png 833 | val_57_13.png 834 | val_57_14.png 835 | val_57_15.png 836 | val_57_16.png 837 | val_57_1.png 838 | val_57_2.png 839 | val_57_3.png 840 | val_57_4.png 841 | val_57_5.png 842 | val_57_6.png 843 | val_57_7.png 844 | val_57_8.png 845 | val_57_9.png 846 | val_5_7.png 847 | val_58_10.png 848 | val_58_11.png 849 | val_58_12.png 850 | val_58_13.png 851 | val_58_14.png 852 | val_58_15.png 853 | val_58_16.png 854 | val_58_1.png 855 | val_58_2.png 856 | val_58_3.png 857 | val_58_4.png 858 | val_58_5.png 859 | val_58_6.png 860 | val_58_7.png 861 | val_58_8.png 862 | val_58_9.png 863 | val_5_8.png 864 | val_59_10.png 865 | val_59_11.png 866 | val_59_12.png 867 | val_59_13.png 868 | val_59_14.png 869 | val_59_15.png 870 | val_59_16.png 871 | val_59_1.png 872 | val_59_2.png 873 | val_59_3.png 874 | val_59_4.png 875 | val_59_5.png 876 | val_59_6.png 877 | val_59_7.png 878 | val_59_8.png 879 | val_59_9.png 880 | val_5_9.png 881 | val_60_10.png 882 | val_60_11.png 883 | val_60_12.png 884 | val_60_13.png 885 | val_60_14.png 886 | val_60_15.png 887 | val_60_16.png 888 | val_60_1.png 889 | val_60_2.png 890 | val_60_3.png 891 | val_60_4.png 892 | val_60_5.png 893 | val_60_6.png 894 | val_60_7.png 895 | val_60_8.png 896 | val_60_9.png 897 | val_6_10.png 898 | val_61_10.png 899 | val_61_11.png 900 | val_61_12.png 901 | val_61_13.png 902 | val_61_14.png 903 | val_61_15.png 904 | val_61_16.png 905 | val_6_11.png 906 | val_61_1.png 907 | val_6_12.png 908 | val_61_2.png 909 | val_6_13.png 910 | val_61_3.png 911 | val_6_14.png 912 | val_61_4.png 913 | val_6_15.png 914 | val_61_5.png 915 | val_6_16.png 916 | val_61_6.png 917 | val_61_7.png 918 | val_61_8.png 919 | val_61_9.png 920 | val_6_1.png 921 | val_62_10.png 922 | val_62_11.png 923 | val_62_12.png 924 | val_62_13.png 925 | val_62_14.png 926 | val_62_15.png 927 | val_62_16.png 928 | val_62_1.png 929 | val_62_2.png 930 | val_62_3.png 931 | val_62_4.png 932 | val_62_5.png 933 | val_62_6.png 934 | val_62_7.png 935 | val_62_8.png 936 | val_62_9.png 937 | val_6_2.png 938 | val_63_10.png 939 | val_63_11.png 940 | val_63_12.png 941 | val_63_13.png 942 | val_63_14.png 943 | val_63_15.png 944 | val_63_16.png 945 | val_63_1.png 946 | val_63_2.png 947 | val_63_3.png 948 | val_63_4.png 949 | val_63_5.png 950 | val_63_6.png 951 | val_63_7.png 952 | val_63_8.png 953 | val_63_9.png 954 | val_6_3.png 955 | val_64_10.png 956 | val_64_11.png 957 | val_64_12.png 958 | val_64_13.png 959 | val_64_14.png 960 | val_64_15.png 961 | val_64_16.png 962 | val_64_1.png 963 | val_64_2.png 964 | val_64_3.png 965 | val_64_4.png 966 | val_64_5.png 967 | val_64_6.png 968 | val_64_7.png 969 | val_64_8.png 970 | val_64_9.png 971 | val_6_4.png 972 | val_6_5.png 973 | val_6_6.png 974 | val_6_7.png 975 | val_6_8.png 976 | val_6_9.png 977 | val_7_10.png 978 | val_7_11.png 979 | val_7_12.png 980 | val_7_13.png 981 | val_7_14.png 982 | val_7_15.png 983 | val_7_16.png 984 | val_7_1.png 985 | val_7_2.png 986 | val_7_3.png 987 | val_7_4.png 988 | val_7_5.png 989 | val_7_6.png 990 | val_7_7.png 991 | val_7_8.png 992 | val_7_9.png 993 | val_8_10.png 994 | val_8_11.png 995 | val_8_12.png 996 | val_8_13.png 997 | val_8_14.png 998 | val_8_15.png 999 | val_8_16.png 1000 | val_8_1.png 1001 | val_8_2.png 1002 | val_8_3.png 1003 | val_8_4.png 1004 | val_8_5.png 1005 | val_8_6.png 1006 | val_8_7.png 1007 | val_8_8.png 1008 | val_8_9.png 1009 | val_9_10.png 1010 | val_9_11.png 1011 | val_9_12.png 1012 | val_9_13.png 1013 | val_9_14.png 1014 | val_9_15.png 1015 | val_9_16.png 1016 | val_9_1.png 1017 | val_9_2.png 1018 | val_9_3.png 1019 | val_9_4.png 1020 | val_9_5.png 1021 | val_9_6.png 1022 | val_9_7.png 1023 | val_9_8.png 1024 | val_9_9.png 1025 | -------------------------------------------------------------------------------- /inference.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import torch 3 | import models 4 | import data 5 | from torch.nn import CrossEntropyLoss, DataParallel 6 | from torch.optim import SGD 7 | from torch.utils.data import DataLoader 8 | from tqdm import tqdm 9 | import numpy as np 10 | import os 11 | from PIL import Image 12 | from data.semi_data import SemiDataset 13 | import math 14 | from torch.optim.lr_scheduler import _LRScheduler 15 | from torch.utils import tensorboard 16 | from models.Change_Detection import CD_Model 17 | from torchvision import transforms 18 | import torch.nn.functional as F 19 | from copy import deepcopy 20 | import random 21 | import warnings 22 | 23 | 24 | def colorize_mask(mask, palette): 25 | zero_pad = 256 * 3 - len(palette) 26 | for i in range(zero_pad): 27 | palette.append(0) 28 | palette[-3:] = [255, 255, 255] 29 | new_mask = Image.fromarray(mask.astype(np.uint8)).convert('P') 30 | new_mask.putpalette(palette) 31 | return new_mask 32 | 33 | 34 | def get_voc_pallete(num_classes): 35 | n = num_classes 36 | pallete = [0]*(n*3) 37 | for j in range(0, n): 38 | lab = j 39 | pallete[j*3+0] = 0 40 | pallete[j*3+1] = 0 41 | pallete[j*3+2] = 0 42 | i = 0 43 | while (lab > 0): 44 | pallete[j*3+0] |= (((lab >> 0) & 1) << (7-i)) 45 | pallete[j*3+1] |= (((lab >> 1) & 1) << (7-i)) 46 | pallete[j*3+2] |= (((lab >> 2) & 1) << (7-i)) 47 | i = i + 1 48 | lab >>= 3 49 | return pallete 50 | 51 | 52 | warnings.filterwarnings("ignore") 53 | 54 | 55 | class Poly(_LRScheduler): 56 | def __init__(self, optimizer, num_epochs, iters_per_epoch, warmup_epochs=0, last_epoch=-1): 57 | self.iters_per_epoch = iters_per_epoch 58 | self.cur_iter = 0 59 | self.N = num_epochs * iters_per_epoch 60 | self.warmup_iters = warmup_epochs * iters_per_epoch 61 | super(Poly, self).__init__(optimizer, last_epoch) 62 | 63 | def get_lr(self): 64 | T = self.last_epoch * self.iters_per_epoch + self.cur_iter 65 | factor = pow((1 - 1.0 * T / self.N), 0.9) 66 | if self.warmup_iters > 0 and T < self.warmup_iters: 67 | factor = 1.0 * T / self.warmup_iters 68 | 69 | self.cur_iter %= self.iters_per_epoch 70 | self.cur_iter += 1 71 | assert factor >= 0, 'error in lr_scheduler' 72 | return [base_lr * factor for base_lr in self.base_lrs] 73 | 74 | 75 | def _get_available_devices(n_gpu): 76 | sys_gpu = torch.cuda.device_count() 77 | if sys_gpu == 0: 78 | n_gpu = 0 79 | elif n_gpu > sys_gpu: 80 | n_gpu = sys_gpu 81 | device = torch.device('cuda:0' if n_gpu > 0 else 'cpu') 82 | available_gpus = list(range(n_gpu)) 83 | return device, available_gpus 84 | 85 | 86 | class DeNormalize(object): 87 | def __init__(self, mean, std): 88 | self.mean = mean 89 | self.std = std 90 | 91 | def __call__(self, tensor): 92 | for t, m, s in zip(tensor, self.mean, self.std): 93 | t.mul_(s).add_(m) 94 | return tensor 95 | 96 | 97 | class meanIOU: 98 | def __init__(self, num_classes): 99 | self.num_classes = num_classes 100 | self.hist = np.zeros((num_classes, num_classes)) 101 | 102 | def _fast_hist(self, label_pred, label_true): 103 | mask = (label_true >= 0) & (label_true < self.num_classes) 104 | hist = np.bincount( 105 | self.num_classes * label_true[mask].astype(int) + 106 | label_pred[mask], minlength=self.num_classes ** 2).reshape(self.num_classes, self.num_classes) 107 | return hist 108 | 109 | def add_batch(self, predictions, gts): 110 | for lp, lt in zip(predictions, gts): 111 | self.hist += self._fast_hist(lp.flatten(), lt.flatten()) 112 | 113 | def evaluate(self): 114 | iu = np.diag(self.hist) / (self.hist.sum(axis=1) + self.hist.sum(axis=0) - np.diag(self.hist)) 115 | return iu, np.nanmean(iu) 116 | 117 | 118 | def batch_pix_accuracy(predict, target): 119 | 120 | # _, predict = torch.max(output, 1) 121 | 122 | predict = predict.int() + 1 123 | target = target.int() + 1 124 | 125 | pixel_labeled = (target > 0).sum() 126 | pixel_correct = ((predict == target)*(target > 0)).sum() 127 | assert pixel_correct <= pixel_labeled, "Correct area should be smaller than Labeled" 128 | return pixel_correct.cpu().numpy(), pixel_labeled.cpu().numpy() 129 | 130 | 131 | def txt2list(txt_path): 132 | # 功能:读取txt文件,并转化为list形式 133 | # txt_path:txt的路径; 134 | 135 | data_list = [] 136 | with open(txt_path) as f: 137 | data = f.readlines() 138 | for line in data: 139 | line = line.strip("\n") # 去除末尾的换行符 140 | data_list.append(line) 141 | 142 | # data_array = np.array(data_list) 143 | return data_list 144 | 145 | 146 | def parse_args(): 147 | parser = argparse.ArgumentParser(description='RC Semi Change Framework') 148 | # basic settings 149 | parser.add_argument('--label_percent', type=int, choices=[5, 10, 20, 40], default=5) 150 | parser.add_argument('--semi', type=bool, default=True) 151 | parser.add_argument('--data_dir', type=str, default='/home/chrisd/change/RC-Semi-Change/data/WHU') 152 | parser.add_argument('--batch_size', type=int, default=5) 153 | parser.add_argument('--epochs', type=int, default=60) 154 | 155 | args = parser.parse_args() 156 | return args 157 | 158 | 159 | def main(args): 160 | import os 161 | os.environ["CUDA_VISIBLE_DEVICES"] = "1" 162 | torch.backends.cudnn.benchmark = True 163 | torch.manual_seed(42) 164 | 165 | valset = SemiDataset(args.data_dir, 'test', label_percent=args.label_percent) 166 | valloader = DataLoader(valset, batch_size=1, shuffle=False, pin_memory=True, num_workers=4, drop_last=False) 167 | 168 | restore_transform = transforms.Compose([ 169 | DeNormalize(valset.MEAN, valset.STD), 170 | transforms.ToPILImage()]) 171 | viz_transform = transforms.Compose([transforms.ToTensor()]) 172 | 173 | # <====================== Supervised training with labeled images (SupOnly) ======================> 174 | 175 | print('\n================> Total stage: ' 176 | 'Test images......') 177 | 178 | model, optimizer = init_basic_elems() 179 | 180 | model_i = deepcopy(model) 181 | # check_or = torch.load('./saved/LEVIR-CD/semi5_RC/semi5_model.pth') 182 | model_i.module.load_state_dict(torch.load('./saved/WHU-CD/semi5_RC/20.00_model.pth')) 183 | 184 | model = test(model_i, valloader, args) 185 | 186 | 187 | def init_basic_elems(): 188 | 189 | model = CD_Model(num_classes=2) 190 | # SETTING THE DEVICE 191 | device, availble_gpus = _get_available_devices(1) 192 | model = torch.nn.DataParallel(model, device_ids=availble_gpus) 193 | model.to(device) 194 | 195 | optimizer = SGD([{'params': filter(lambda p: p.requires_grad, model.module.get_other_params())}, 196 | {'params': filter(lambda p: p.requires_grad, model.module.get_backbone_params()), 197 | 'lr': 1e-2 / 10}], lr=1e-2, momentum=0.9, weight_decay=1e-4) 198 | 199 | return model, optimizer 200 | 201 | 202 | def comtrastive_loss(pred, target, mean=False): 203 | output_pred = F.softmax(pred, dim=1) 204 | postive_pred = output_pred[:5] 205 | negtive_pred = output_pred[5:] 206 | M = target[:5].clone().float() 207 | 208 | loss_pos = F.mse_loss(postive_pred[:, 0, :, :], negtive_pred[:, 0, :, :], reduction='none') * (1 - M) 209 | loss_neg1 = F.mse_loss(postive_pred[:, 0, :, :], negtive_pred[:, 1, :, :], reduction='none') * M 210 | loss_neg2 = F.mse_loss(postive_pred[:, 1, :, :], negtive_pred[:, 0, :, :], reduction='none') * M 211 | 212 | loss_ct = loss_pos + loss_neg1 + loss_neg2 213 | if mean: 214 | loss_ct = loss_ct.mean() 215 | return loss_ct 216 | 217 | 218 | def test(model, valloader, args): 219 | 220 | metric = meanIOU(num_classes=2) 221 | model.eval() 222 | tbar = tqdm(valloader) 223 | num_classes = 2 224 | palette = get_voc_pallete(num_classes) 225 | 226 | with torch.no_grad(): 227 | for image_A, image_B, label, image_id in tbar: 228 | image_A, image_B = image_A.cuda(non_blocking=True), image_B.cuda(non_blocking=True) 229 | pred_or = model(image_A, image_B) 230 | pred = torch.argmax(pred_or, dim=1).cpu() 231 | 232 | prediction = pred_or.data.cpu().numpy().squeeze(0) 233 | 234 | metric.add_batch(pred.numpy(), label.numpy()) 235 | mIOU = metric.evaluate()[0][1] 236 | 237 | # pred[pred == 1] = 255 238 | # pred = Image.fromarray(pred.squeeze(0).numpy().astype(np.uint8), mode='L') 239 | 240 | prediction = np.asarray(np.argmax(prediction, axis=0), dtype=np.uint8) 241 | prediction_im = colorize_mask(prediction, palette) 242 | # prediction_im.save('./outputs/' + config["experim_name"] + '/' + image_id + '.png') 243 | 244 | prediction_im.save(os.path.join('./outputs/WHU/r5/20/', os.path.basename(image_id[0]))) 245 | 246 | tbar.set_description('mIOU: %.2f' % (mIOU * 100.0)) 247 | 248 | print(mIOU) 249 | 250 | return model 251 | 252 | 253 | def train(mode, model, trainloader, valloader, optimizer, args): 254 | iters = 0 255 | previous_best = 0.0 256 | 257 | if mode == 'train': 258 | checkpoints = [] 259 | 260 | iter_per_epoch = len(trainloader) 261 | lr_scheduler = Poly(optimizer=optimizer, num_epochs=args.epochs, iters_per_epoch=iter_per_epoch) 262 | 263 | writer_dir = os.path.join(args.save_dir, mode) 264 | writer = tensorboard.SummaryWriter(writer_dir) 265 | 266 | for epoch in range(1, args.epochs + 1): 267 | print("\n==> Epoch %i, previous best = %.2f" % (epoch, previous_best)) 268 | 269 | model.train() 270 | wrt_mode = 'train' 271 | total_loss = 0.0 272 | total_ce_loss = 0.0 273 | total_ct_loss = 0.0 274 | tbar = tqdm(trainloader) 275 | 276 | for i, (image_A, image_B, label, image_id) in enumerate(tbar): 277 | image_A, image_B, label = image_A.cuda(non_blocking=True), image_B.cuda(non_blocking=True), \ 278 | label.cuda(non_blocking=True) 279 | optimizer.zero_grad() 280 | 281 | # A_l_aug = image_A 282 | # B_l_aug = image_B 283 | # target_l_aug = label 284 | 285 | if random.random() < 0.5: 286 | A_l_aug = torch.cat((image_A, image_A), dim=0) 287 | B_l_aug = torch.cat((image_B, image_A), dim=0) 288 | target_l_aug = torch.cat((label, label * 0), dim=0) 289 | else: 290 | A_l_aug = torch.cat((image_A, image_B), dim=0) 291 | B_l_aug = torch.cat((image_B, image_B), dim=0) 292 | target_l_aug = torch.cat((label, label * 0), dim=0) 293 | 294 | pred = model(A_l_aug, B_l_aug) 295 | loss_ce = F.cross_entropy(input=pred, target=target_l_aug) 296 | loss_ct = comtrastive_loss(pred, target_l_aug, mean=True) 297 | loss = loss_ce + loss_ct 298 | 299 | loss.backward() 300 | optimizer.step() 301 | 302 | total_loss += loss.item() 303 | total_ce_loss += loss_ce.item() 304 | total_ct_loss += loss_ct.item() 305 | 306 | iters += 1 307 | lr_scheduler.step(epoch=epoch - 1) 308 | 309 | tbar.set_description('CE_Loss: %.3f CT_Loss: %.3f' % (total_ce_loss / (i + 1), total_ct_loss / (i + 1))) 310 | 311 | writer.add_scalar(f'{mode}/{wrt_mode}/CE_Loss', total_ce_loss / (i + 1), iters) 312 | writer.add_scalar(f'{mode}/{wrt_mode}/CT_Loss', total_ct_loss / (i + 1), iters) 313 | 314 | metric = meanIOU(num_classes=2) 315 | 316 | model.eval() 317 | wrt_mode = 'val' 318 | tbar = tqdm(valloader) 319 | total_correct, total_label = 0, 0 320 | 321 | with torch.no_grad(): 322 | for image_A, image_B, label, image_id in tbar: 323 | label, image_A, image_B = label.cuda(non_blocking=True), image_A.cuda(non_blocking=True), \ 324 | image_B.cuda(non_blocking=True) 325 | pred = model(image_A, image_B) 326 | pred = torch.argmax(pred, dim=1) 327 | 328 | metric.add_batch(pred.cpu().numpy(), label.cpu().numpy()) 329 | cIOU = metric.evaluate()[0][1] 330 | 331 | correct, labeled = batch_pix_accuracy(pred, label) 332 | 333 | total_correct, total_label = total_correct + correct, total_label + labeled 334 | 335 | tbar.set_description('cIOU: %.2f PA: %.2f' % 336 | (cIOU * 100.0, (1.0 * total_correct / (np.spacing(1) + total_label)))) 337 | 338 | cIOU *= 100.0 339 | pixAcc = 1.0 * total_correct / (np.spacing(1) + total_label) 340 | 341 | writer.add_scalar(f'{mode}/{wrt_mode}/cIOU', cIOU, epoch) 342 | writer.add_scalar(f'{mode}/{wrt_mode}/pixAcc', pixAcc, epoch) 343 | 344 | if cIOU > previous_best: 345 | if previous_best != 0: 346 | os.remove(os.path.join(args.save_dir, '%.2f_best_model.pth' % previous_best)) 347 | previous_best = cIOU 348 | torch.save(model.module.state_dict(), os.path.join(args.save_dir, '%.2f_best_model.pth' % cIOU)) 349 | best_model = deepcopy(model) 350 | 351 | if mode == 'train' and (epoch in [args.epochs // 3, args.epochs * 2 // 3, args.epochs]): 352 | checkpoints.append(deepcopy(model)) 353 | torch.save(model.module.state_dict(), os.path.join(args.save_dir, '%.2f_model.pth' % epoch)) 354 | 355 | if mode == 'train': 356 | return best_model, checkpoints 357 | 358 | return best_model 359 | 360 | 361 | def select_reliable(models, dataloader, args): 362 | 363 | for i in range(len(models)): 364 | models[i].eval() 365 | tbar = tqdm(dataloader) 366 | 367 | id_to_reliability = [] 368 | 369 | with torch.no_grad(): 370 | for image_A, image_B, label, image_id in tbar: 371 | label, image_A, image_B = label.cuda(non_blocking=True), image_A.cuda(non_blocking=True), \ 372 | image_B.cuda(non_blocking=True) 373 | 374 | preds = [] 375 | for model in models: 376 | preds.append(torch.argmax(model(image_A, image_B), dim=1).cpu().numpy()) 377 | 378 | mIOU = [] 379 | for i in range(len(preds) - 1): 380 | metric = meanIOU(num_classes=2) 381 | metric.add_batch(preds[i], preds[-1]) 382 | # re_i = metric.evaluate()[0][1] 383 | # if np.isnan(re_i): 384 | # mIOU.append(0.0) 385 | # else: 386 | # mIOU.append(re_i) 387 | mIOU.append(metric.evaluate()[-1]) 388 | 389 | reliability = sum(mIOU) / len(mIOU) 390 | id_to_reliability.append((image_id, reliability)) 391 | 392 | id_to_reliability.sort(key=lambda elem: elem[1], reverse=True) 393 | with open(os.path.join(args.data_dir, 'list', f"{args.label_percent}_{'reliable_ids'}" + ".txt"), 'w') as f: 394 | for elem in id_to_reliability[:len(id_to_reliability) // 2]: 395 | f.write(elem[0][0] + '\n') 396 | with open(os.path.join(args.data_dir, 'list', f"{args.label_percent}_{'unreliable_ids'}" + ".txt"), 'w') as f: 397 | for elem in id_to_reliability[len(id_to_reliability) // 2:]: 398 | f.write(elem[0][0] + '\n') 399 | 400 | 401 | def label(model, dataloader): 402 | model.eval() 403 | tbar = tqdm(dataloader) 404 | 405 | metric = meanIOU(num_classes=2) 406 | 407 | with torch.no_grad(): 408 | for image_A, image_B, label, image_id in tbar: 409 | image_A, image_B = image_A.cuda(non_blocking=True), image_B.cuda(non_blocking=True) 410 | pred = model(image_A, image_B) 411 | pred = torch.argmax(pred, dim=1).cpu() 412 | 413 | metric.add_batch(pred.numpy(), label.numpy()) 414 | mIOU = metric.evaluate()[0][1] 415 | # mIOU = metric.evaluate()[-1] 416 | 417 | pred[pred == 1] = 255 418 | pred = Image.fromarray(pred.squeeze(0).numpy().astype(np.uint8), mode='L') 419 | pred.save(os.path.join('./data/WHU/pseudo_label10/', os.path.basename(image_id[0]))) 420 | 421 | tbar.set_description('mIOU: %.2f' % (mIOU * 100.0)) 422 | 423 | 424 | if __name__ == '__main__': 425 | args = parse_args() 426 | print(args) 427 | main(args) 428 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import torch 3 | import models 4 | import data 5 | from torch.nn import CrossEntropyLoss, DataParallel 6 | from torch.optim import SGD 7 | from torch.utils.data import DataLoader 8 | from tqdm import tqdm 9 | import numpy as np 10 | import os 11 | from PIL import Image 12 | from data.semi_data import SemiDataset 13 | import math 14 | from torch.optim.lr_scheduler import _LRScheduler 15 | from torch.utils import tensorboard 16 | from models.Change_Detection import CD_Model 17 | from torchvision import transforms 18 | import torch.nn.functional as F 19 | from copy import deepcopy 20 | import random 21 | import warnings 22 | 23 | 24 | warnings.filterwarnings("ignore") 25 | 26 | 27 | class Poly(_LRScheduler): 28 | def __init__(self, optimizer, num_epochs, iters_per_epoch, warmup_epochs=0, last_epoch=-1): 29 | self.iters_per_epoch = iters_per_epoch 30 | self.cur_iter = 0 31 | self.N = num_epochs * iters_per_epoch 32 | self.warmup_iters = warmup_epochs * iters_per_epoch 33 | super(Poly, self).__init__(optimizer, last_epoch) 34 | 35 | def get_lr(self): 36 | T = self.last_epoch * self.iters_per_epoch + self.cur_iter 37 | factor = pow((1 - 1.0 * T / self.N), 0.9) 38 | if self.warmup_iters > 0 and T < self.warmup_iters: 39 | factor = 1.0 * T / self.warmup_iters 40 | 41 | self.cur_iter %= self.iters_per_epoch 42 | self.cur_iter += 1 43 | assert factor >= 0, 'error in lr_scheduler' 44 | return [base_lr * factor for base_lr in self.base_lrs] 45 | 46 | 47 | def _get_available_devices(n_gpu): 48 | sys_gpu = torch.cuda.device_count() 49 | if sys_gpu == 0: 50 | n_gpu = 0 51 | elif n_gpu > sys_gpu: 52 | n_gpu = sys_gpu 53 | device = torch.device('cuda:0' if n_gpu > 0 else 'cpu') 54 | available_gpus = list(range(n_gpu)) 55 | return device, available_gpus 56 | 57 | 58 | class DeNormalize(object): 59 | def __init__(self, mean, std): 60 | self.mean = mean 61 | self.std = std 62 | 63 | def __call__(self, tensor): 64 | for t, m, s in zip(tensor, self.mean, self.std): 65 | t.mul_(s).add_(m) 66 | return tensor 67 | 68 | 69 | class meanIOU: 70 | def __init__(self, num_classes): 71 | self.num_classes = num_classes 72 | self.hist = np.zeros((num_classes, num_classes)) 73 | 74 | def _fast_hist(self, label_pred, label_true): 75 | mask = (label_true >= 0) & (label_true < self.num_classes) 76 | hist = np.bincount( 77 | self.num_classes * label_true[mask].astype(int) + 78 | label_pred[mask], minlength=self.num_classes ** 2).reshape(self.num_classes, self.num_classes) 79 | return hist 80 | 81 | def add_batch(self, predictions, gts): 82 | for lp, lt in zip(predictions, gts): 83 | self.hist += self._fast_hist(lp.flatten(), lt.flatten()) 84 | 85 | def evaluate(self): 86 | iu = np.diag(self.hist) / (self.hist.sum(axis=1) + self.hist.sum(axis=0) - np.diag(self.hist)) 87 | return iu, np.nanmean(iu) 88 | 89 | 90 | def batch_pix_accuracy(predict, target): 91 | 92 | # _, predict = torch.max(output, 1) 93 | 94 | predict = predict.int() + 1 95 | target = target.int() + 1 96 | 97 | pixel_labeled = (target > 0).sum() 98 | pixel_correct = ((predict == target)*(target > 0)).sum() 99 | assert pixel_correct <= pixel_labeled, "Correct area should be smaller than Labeled" 100 | return pixel_correct.cpu().numpy(), pixel_labeled.cpu().numpy() 101 | 102 | 103 | def txt2list(txt_path): 104 | # 功能:读取txt文件,并转化为list形式 105 | # txt_path:txt的路径; 106 | 107 | data_list = [] 108 | with open(txt_path) as f: 109 | data = f.readlines() 110 | for line in data: 111 | line = line.strip("\n") # 去除末尾的换行符 112 | data_list.append(line) 113 | 114 | # data_array = np.array(data_list) 115 | return data_list 116 | 117 | # cp /home/chrisd/change/RC-Semi-Change/data/LEVIR/A/* /home/chrisd/change/RCSCD/data/SZTAKI-ext/A/ 118 | 119 | def parse_args(): 120 | parser = argparse.ArgumentParser(description='RC Semi Change Detection Framework') 121 | # basic settings 122 | parser.add_argument('--save_dir', type=str, default='./saved/WHU/semi20-ext-levir/') 123 | parser.add_argument('--label_percent', type=int, choices=[5, 10, 20, 40, 100], default=20) 124 | parser.add_argument('--semi', type=bool, default=True) 125 | parser.add_argument('--data_dir', type=str, default='./data/SZTAKI-ext') 126 | parser.add_argument('--batch_size', type=int, default=5) 127 | parser.add_argument('--epochs', type=int, default=100) 128 | 129 | args = parser.parse_args() 130 | return args 131 | 132 | 133 | def main(args): 134 | # os.environ["CUDA_VISIBLE_DEVICES"] = "1" 135 | torch.backends.cudnn.benchmark = True 136 | torch.manual_seed(42) 137 | 138 | valset = SemiDataset(args.data_dir, 'val', label_percent=args.label_percent) 139 | valloader = DataLoader(valset, batch_size=1, shuffle=False, pin_memory=True, num_workers=4, drop_last=False) 140 | 141 | # <====================== Supervised training with labeled images (SupOnly) ======================> 142 | 143 | mode = 'train' 144 | trainset = SemiDataset(args.data_dir, mode, label_percent=args.label_percent) 145 | trainset.ids = 2 * trainset.ids if len(trainset.ids) < 200 else trainset.ids 146 | trainloader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, 147 | pin_memory=True, num_workers=16, drop_last=True) 148 | 149 | model, optimizer = init_basic_elems() 150 | 151 | step1 = True 152 | if step1: 153 | if args.semi: 154 | best_model, checkpoints = train(mode, model, trainloader, valloader, optimizer, args) 155 | print('\n================> Total stage 1: ' 156 | 'Supervised training on labeled images (SupOnly + CT_loss)') 157 | else: 158 | best_model = train_sup(mode, model, trainloader, valloader, optimizer, args) 159 | print('\n================> Total stage 1: ' 160 | 'Supervised training on labeled images (SupOnly)') 161 | exit() 162 | 163 | # <===================================== Select Reliable IDs =====================================> 164 | print('\n\n\n================> Total stage 2/6: Select reliable images for the 1st stage re-training') 165 | step2 = True 166 | if step2: 167 | mode = 'select' 168 | dataset = SemiDataset(args.data_dir, mode, label_percent=args.label_percent) 169 | dataloader = DataLoader(dataset, batch_size=1, shuffle=False, pin_memory=True, num_workers=4, drop_last=False) 170 | 171 | # models = [] 172 | # for i in range(3): 173 | # model_i = deepcopy(model) 174 | # model_i.module.load_state_dict(torch.load('./step_models/%d_model.pth' % i)) 175 | # models.append(model_i) 176 | # select_reliable(models, dataloader, args) 177 | 178 | select_reliable(checkpoints, dataloader, args) 179 | 180 | # <================================ Pseudo label reliable images =================================> 181 | print('\n\n\n================> Total stage 3/6: Pseudo labeling reliable images') 182 | step3 = True 183 | if step3: 184 | mode = 'label' 185 | dataset = SemiDataset(args.data_dir, mode, label_percent=args.label_percent) 186 | dataloader = DataLoader(dataset, batch_size=1, shuffle=False, pin_memory=True, num_workers=4, drop_last=False) 187 | 188 | # best_model = deepcopy(model) 189 | # best_model.module.load_state_dict(torch.load('./step_models/best_model.pth')) 190 | label(best_model, dataloader, args) 191 | 192 | # <================================== The 1st stage re-training ==================================> 193 | print('\n\n\n================> Total stage 4/6: The 1st stage re-training on labeled and reliable unlabeled images') 194 | step4 = True 195 | if step4: 196 | mode = 'semi' 197 | trainset = SemiDataset(args.data_dir, mode, label_percent=args.label_percent) 198 | trainloader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, 199 | pin_memory=True, num_workers=16, drop_last=True) 200 | 201 | model, optimizer = init_basic_elems() 202 | 203 | final_model = train(mode, model, trainloader, valloader, optimizer, args) 204 | 205 | torch.save(final_model.module.state_dict(), 206 | os.path.join(args.save_dir, f"{args.label_percent}_{'semi_model'}" + ".pth")) 207 | 208 | 209 | def init_basic_elems(): 210 | 211 | model = CD_Model(num_classes=2) 212 | # SETTING THE DEVICE 213 | device, availble_gpus = _get_available_devices(1) 214 | model = torch.nn.DataParallel(model, device_ids=availble_gpus) 215 | model.to(device) 216 | 217 | optimizer = SGD([{'params': filter(lambda p: p.requires_grad, model.module.get_other_params())}, 218 | {'params': filter(lambda p: p.requires_grad, model.module.get_backbone_params()), 219 | 'lr': 1e-2 / 10}], lr=1e-2, momentum=0.9, weight_decay=1e-4) 220 | 221 | return model, optimizer 222 | 223 | 224 | def comtrastive_loss(pred, target, mean=False): 225 | output_pred = F.softmax(pred, dim=1) 226 | postive_pred = output_pred[:5] 227 | negtive_pred = output_pred[5:] 228 | M = target[:5].clone().float() 229 | 230 | loss_pos = F.mse_loss(postive_pred[:, 0, :, :], negtive_pred[:, 0, :, :], reduction='none') * (1 - M) 231 | loss_neg1 = F.mse_loss(postive_pred[:, 0, :, :], negtive_pred[:, 1, :, :], reduction='none') * M 232 | loss_neg2 = F.mse_loss(postive_pred[:, 1, :, :], negtive_pred[:, 0, :, :], reduction='none') * M 233 | 234 | loss_ct = loss_pos + loss_neg1 + loss_neg2 235 | if mean: 236 | loss_ct = loss_ct.mean() 237 | return loss_ct 238 | 239 | 240 | def train_sup(mode, model, trainloader, valloader, optimizer, args): 241 | iters = 0 242 | previous_best = 0.0 243 | 244 | iter_per_epoch = len(trainloader) 245 | lr_scheduler = Poly(optimizer=optimizer, num_epochs=args.epochs, iters_per_epoch=iter_per_epoch) 246 | 247 | writer_dir = os.path.join(args.save_dir, mode) 248 | writer = tensorboard.SummaryWriter(writer_dir) 249 | 250 | for epoch in range(1, args.epochs + 1): 251 | print("\n==> Epoch %i, previous best = %.2f" % (epoch, previous_best)) 252 | 253 | model.train() 254 | wrt_mode = 'train' 255 | total_loss = 0.0 256 | tbar = tqdm(trainloader) 257 | 258 | for i, (image_A, image_B, label, image_id) in enumerate(tbar): 259 | image_A, image_B, label = image_A.cuda(non_blocking=True), image_B.cuda(non_blocking=True), \ 260 | label.cuda(non_blocking=True) 261 | optimizer.zero_grad() 262 | pred = model(image_A, image_B) 263 | loss = F.cross_entropy(input=pred, target=label) 264 | loss.backward() 265 | optimizer.step() 266 | total_loss += loss.item() 267 | iters += 1 268 | lr_scheduler.step(epoch=epoch - 1) 269 | tbar.set_description('CE_Loss: %.3f ' % (total_loss / (i + 1))) 270 | writer.add_scalar(f'{mode}/{wrt_mode}/CE_Loss', total_loss / (i + 1), iters) 271 | 272 | metric = meanIOU(num_classes=2) 273 | 274 | if epoch % 1 == 0: 275 | model.eval() 276 | wrt_mode = 'val' 277 | tbar = tqdm(valloader) 278 | total_correct, total_label = 0, 0 279 | with torch.no_grad(): 280 | for image_A, image_B, label, image_id in tbar: 281 | label, image_A, image_B = label.cuda(non_blocking=True), image_A.cuda(non_blocking=True), \ 282 | image_B.cuda(non_blocking=True) 283 | pred = model(image_A, image_B) 284 | pred = torch.argmax(pred, dim=1) 285 | 286 | metric.add_batch(pred.cpu().numpy(), label.cpu().numpy()) 287 | cIOU = metric.evaluate()[0][1] 288 | correct, labeled = batch_pix_accuracy(pred, label) 289 | total_correct, total_label = total_correct + correct, total_label + labeled 290 | 291 | tbar.set_description('cIOU: %.2f PA: %.2f' % 292 | (cIOU * 100.0, (1.0 * total_correct / (np.spacing(1) + total_label)))) 293 | cIOU *= 100.0 294 | pixAcc = 1.0 * total_correct / (np.spacing(1) + total_label) 295 | 296 | writer.add_scalar(f'{mode}/{wrt_mode}/cIOU', cIOU, epoch) 297 | writer.add_scalar(f'{mode}/{wrt_mode}/pixAcc', pixAcc, epoch) 298 | 299 | if cIOU > previous_best: 300 | if previous_best != 0: 301 | os.remove(os.path.join(args.save_dir, '%.2f_best_model.pth' % previous_best)) 302 | previous_best = cIOU 303 | torch.save(model.module.state_dict(), os.path.join(args.save_dir, '%.2f_best_model.pth' % cIOU)) 304 | best_model = deepcopy(model) 305 | 306 | return best_model 307 | 308 | 309 | def train(mode, model, trainloader, valloader, optimizer, args): 310 | iters = 0 311 | previous_best = 0.0 312 | 313 | if mode == 'train': 314 | checkpoints = [] 315 | 316 | iter_per_epoch = len(trainloader) 317 | lr_scheduler = Poly(optimizer=optimizer, num_epochs=args.epochs, iters_per_epoch=iter_per_epoch) 318 | 319 | writer_dir = os.path.join(args.save_dir, mode) 320 | writer = tensorboard.SummaryWriter(writer_dir) 321 | 322 | for epoch in range(1, args.epochs + 1): 323 | print("\n==> Epoch %i, previous best = %.2f" % (epoch, previous_best)) 324 | 325 | model.train() 326 | wrt_mode = 'train' 327 | total_loss = 0.0 328 | total_ce_loss = 0.0 329 | total_ct_loss = 0.0 330 | tbar = tqdm(trainloader) 331 | 332 | for i, (image_A, image_B, label, image_id) in enumerate(tbar): 333 | image_A, image_B, label = image_A.cuda(non_blocking=True), image_B.cuda(non_blocking=True), \ 334 | label.cuda(non_blocking=True) 335 | optimizer.zero_grad() 336 | 337 | if random.random() < 0.5: 338 | A_l_aug = torch.cat((image_A, image_A), dim=0) 339 | B_l_aug = torch.cat((image_B, image_A), dim=0) 340 | target_l_aug = torch.cat((label, label * 0), dim=0) 341 | else: 342 | A_l_aug = torch.cat((image_A, image_B), dim=0) 343 | B_l_aug = torch.cat((image_B, image_B), dim=0) 344 | target_l_aug = torch.cat((label, label * 0), dim=0) 345 | 346 | pred = model(A_l_aug, B_l_aug) 347 | loss_ce = F.cross_entropy(input=pred, target=target_l_aug) 348 | loss_ct = comtrastive_loss(pred, target_l_aug, mean=True) 349 | loss = loss_ce + loss_ct 350 | 351 | loss.backward() 352 | optimizer.step() 353 | 354 | total_loss += loss.item() 355 | total_ce_loss += loss_ce.item() 356 | total_ct_loss += loss_ct.item() 357 | 358 | iters += 1 359 | lr_scheduler.step(epoch=epoch - 1) 360 | 361 | tbar.set_description('CE_Loss: %.3f CT_Loss: %.3f' % (total_ce_loss / (i + 1), total_ct_loss / (i + 1))) 362 | 363 | writer.add_scalar(f'{mode}/{wrt_mode}/CE_Loss', total_ce_loss / (i + 1), iters) 364 | writer.add_scalar(f'{mode}/{wrt_mode}/CT_Loss', total_ct_loss / (i + 1), iters) 365 | 366 | del image_A, image_B, label, pred, A_l_aug, B_l_aug, target_l_aug 367 | 368 | metric = meanIOU(num_classes=2) 369 | 370 | model.eval() 371 | wrt_mode = 'val' 372 | tbar = tqdm(valloader) 373 | total_correct, total_label = 0, 0 374 | 375 | with torch.no_grad(): 376 | for image_A, image_B, label, image_id in tbar: 377 | label, image_A, image_B = label.cuda(non_blocking=True), image_A.cuda(non_blocking=True), \ 378 | image_B.cuda(non_blocking=True) 379 | pred = model(image_A, image_B) 380 | pred = torch.argmax(pred, dim=1) 381 | 382 | metric.add_batch(pred.cpu().numpy(), label.cpu().numpy()) 383 | cIOU = metric.evaluate()[0][1] 384 | 385 | correct, labeled = batch_pix_accuracy(pred, label) 386 | 387 | total_correct, total_label = total_correct + correct, total_label + labeled 388 | 389 | tbar.set_description('cIOU: %.2f PA: %.2f' % 390 | (cIOU * 100.0, (1.0 * total_correct / (np.spacing(1) + total_label)))) 391 | 392 | cIOU *= 100.0 393 | pixAcc = 1.0 * total_correct / (np.spacing(1) + total_label) 394 | 395 | writer.add_scalar(f'{mode}/{wrt_mode}/cIOU', cIOU, epoch) 396 | writer.add_scalar(f'{mode}/{wrt_mode}/pixAcc', pixAcc, epoch) 397 | 398 | del label, image_A, image_B, pred 399 | 400 | if cIOU > previous_best: 401 | if previous_best != 0: 402 | os.remove(os.path.join(args.save_dir, '%.2f_best_model.pth' % previous_best)) 403 | previous_best = cIOU 404 | torch.save(model.module.state_dict(), os.path.join(args.save_dir, '%.2f_best_model.pth' % cIOU)) 405 | best_model = deepcopy(model) 406 | 407 | if mode == 'train' and (epoch in [args.epochs // 3, args.epochs * 2 // 3, args.epochs]): 408 | checkpoints.append(deepcopy(model)) 409 | if epoch == args.epochs: 410 | checkpoints.append(best_model) 411 | torch.save(model.module.state_dict(), os.path.join(args.save_dir, '%.2f_model.pth' % epoch)) 412 | 413 | if mode == 'train': 414 | return best_model, checkpoints 415 | 416 | return best_model 417 | 418 | 419 | def select_reliable(models, dataloader, args): 420 | 421 | for i in range(len(models)): 422 | models[i].eval() 423 | tbar = tqdm(dataloader) 424 | 425 | id_to_reliability = [] 426 | 427 | with torch.no_grad(): 428 | for image_A, image_B, label, image_id in tbar: 429 | label, image_A, image_B = label.cuda(non_blocking=True), image_A.cuda(non_blocking=True), \ 430 | image_B.cuda(non_blocking=True) 431 | 432 | preds = [] 433 | for model in models: 434 | preds.append(torch.argmax(model(image_A, image_B), dim=1).cpu().numpy()) 435 | 436 | mIOU = [] 437 | for i in range(len(preds) - 1): 438 | metric = meanIOU(num_classes=2) 439 | metric.add_batch(preds[i], preds[-1]) 440 | # re_i = metric.evaluate()[0][1] 441 | # if np.isnan(re_i): 442 | # mIOU.append(0.0) 443 | # else: 444 | # mIOU.append(re_i) 445 | mIOU.append(metric.evaluate()[-1]) 446 | 447 | reliability = sum(mIOU) / len(mIOU) 448 | id_to_reliability.append((image_id, reliability)) 449 | 450 | id_to_reliability.sort(key=lambda elem: elem[1], reverse=True) 451 | with open(os.path.join(args.data_dir, 'list', f"{args.label_percent}_{'reliable_ids'}" + ".txt"), 'w') as f: 452 | for elem in id_to_reliability[:len(id_to_reliability) // 2]: 453 | f.write(elem[0][0] + '\n') 454 | with open(os.path.join(args.data_dir, 'list', f"{args.label_percent}_{'unreliable_ids'}" + ".txt"), 'w') as f: 455 | for elem in id_to_reliability[len(id_to_reliability) // 2:]: 456 | f.write(elem[0][0] + '\n') 457 | 458 | 459 | def label(model, dataloader, args): 460 | model.eval() 461 | tbar = tqdm(dataloader) 462 | 463 | metric = meanIOU(num_classes=2) 464 | 465 | with torch.no_grad(): 466 | for image_A, image_B, label, image_id in tbar: 467 | image_A, image_B = image_A.cuda(non_blocking=True), image_B.cuda(non_blocking=True) 468 | pred = model(image_A, image_B) 469 | pred = torch.argmax(pred, dim=1).cpu() 470 | 471 | metric.add_batch(pred.numpy(), label.numpy()) 472 | mIOU = metric.evaluate()[0][1] 473 | # mIOU = metric.evaluate()[-1] 474 | 475 | pred[pred == 1] = 255 476 | pred = Image.fromarray(pred.squeeze(0).numpy().astype(np.uint8), mode='L') 477 | pred.save(os.path.join(f"{args.data_dir}{'/pseudo_label'}_{args.label_percent}", os.path.basename(image_id[0]))) 478 | 479 | tbar.set_description('mIOU: %.2f' % (mIOU * 100.0)) 480 | del label, image_A, image_B, pred 481 | 482 | 483 | if __name__ == '__main__': 484 | args = parse_args() 485 | print(args) 486 | main(args) 487 | -------------------------------------------------------------------------------- /models/Change_Detection.py: -------------------------------------------------------------------------------- 1 | import math, time 2 | from itertools import chain 3 | import torch 4 | import torch.nn.functional as F 5 | from torch import nn 6 | from models.base_model import BaseModel 7 | from models.decoders import * 8 | from models.encoder import Encoder 9 | from copy import deepcopy 10 | from torch.distributions.uniform import Uniform 11 | 12 | 13 | class CD_Model(BaseModel): 14 | def __init__(self, num_classes, pretrained=True): 15 | 16 | self.num_classes = num_classes 17 | super(CD_Model, self).__init__() 18 | 19 | # Create the model 20 | self.encoder = Encoder(pretrained=pretrained) 21 | 22 | # The main encoder 23 | upscale = 8 24 | num_out_ch = 2048 25 | decoder_in_ch = num_out_ch // 4 26 | self.main_decoder = MainDecoder(upscale, decoder_in_ch, num_classes=num_classes) 27 | 28 | self.aux = False 29 | # The auxilary decoders 30 | # if self.mode == 'semi' or self.mode == 'weakly_semi': 31 | # vat_decoder = [VATDecoder(upscale, decoder_in_ch, num_classes, xi=conf['xi'], 32 | # eps=conf['eps']) for _ in range(conf['vat'])] 33 | # drop_decoder = [DropOutDecoder(upscale, decoder_in_ch, num_classes, 34 | # drop_rate=conf['drop_rate'], spatial_dropout=conf['spatial']) 35 | # for _ in range(conf['drop'])] 36 | # cut_decoder = [CutOutDecoder(upscale, decoder_in_ch, num_classes, erase=conf['erase']) 37 | # for _ in range(conf['cutout'])] 38 | # context_m_decoder = [ContextMaskingDecoder(upscale, decoder_in_ch, num_classes) 39 | # for _ in range(conf['context_masking'])] 40 | # object_masking = [ObjectMaskingDecoder(upscale, decoder_in_ch, num_classes) 41 | # for _ in range(conf['object_masking'])] 42 | # feature_drop = [FeatureDropDecoder(upscale, decoder_in_ch, num_classes) 43 | # for _ in range(conf['feature_drop'])] 44 | # feature_noise = [FeatureNoiseDecoder(upscale, decoder_in_ch, num_classes, 45 | # uniform_range=conf['uniform_range']) 46 | # for _ in range(conf['feature_noise'])] 47 | # 48 | # self.aux_decoders = nn.ModuleList([*vat_decoder, *drop_decoder, *cut_decoder, 49 | # *context_m_decoder, *object_masking, *feature_drop, *feature_noise]) 50 | # self.aux_decoders = nn.ModuleList([*vat_decoder]) 51 | 52 | def forward(self, A_l=None, B_l=None): 53 | 54 | output_l = self.main_decoder(self.encoder(A_l, B_l)) 55 | 56 | return output_l 57 | 58 | def get_backbone_params(self): 59 | return self.encoder.get_backbone_params() 60 | 61 | def get_other_params(self): 62 | if self.aux: 63 | return chain(self.encoder.get_module_params(), self.main_decoder.parameters()) 64 | # return chain(self.encoder.get_module_params(), self.main_decoder.parameters(), 65 | # self.aux_decoders.parameters()) 66 | 67 | return chain(self.encoder.get_module_params(), self.main_decoder.parameters()) 68 | 69 | -------------------------------------------------------------------------------- /models/__pycache__/Change_Detection.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/__pycache__/Change_Detection.cpython-38.pyc -------------------------------------------------------------------------------- /models/__pycache__/base_model.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/__pycache__/base_model.cpython-38.pyc -------------------------------------------------------------------------------- /models/__pycache__/decoders.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/__pycache__/decoders.cpython-38.pyc -------------------------------------------------------------------------------- /models/__pycache__/encoder.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/__pycache__/encoder.cpython-38.pyc -------------------------------------------------------------------------------- /models/backbones/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__init__.py -------------------------------------------------------------------------------- /models/backbones/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /models/backbones/__pycache__/__init__.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__pycache__/__init__.cpython-38.pyc -------------------------------------------------------------------------------- /models/backbones/__pycache__/__init__.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__pycache__/__init__.cpython-39.pyc -------------------------------------------------------------------------------- /models/backbones/__pycache__/module_helper.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__pycache__/module_helper.cpython-37.pyc -------------------------------------------------------------------------------- /models/backbones/__pycache__/module_helper.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__pycache__/module_helper.cpython-38.pyc -------------------------------------------------------------------------------- /models/backbones/__pycache__/module_helper.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__pycache__/module_helper.cpython-39.pyc -------------------------------------------------------------------------------- /models/backbones/__pycache__/resnet_backbone.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__pycache__/resnet_backbone.cpython-37.pyc -------------------------------------------------------------------------------- /models/backbones/__pycache__/resnet_backbone.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__pycache__/resnet_backbone.cpython-38.pyc -------------------------------------------------------------------------------- /models/backbones/__pycache__/resnet_backbone.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__pycache__/resnet_backbone.cpython-39.pyc -------------------------------------------------------------------------------- /models/backbones/__pycache__/resnet_models.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__pycache__/resnet_models.cpython-37.pyc -------------------------------------------------------------------------------- /models/backbones/__pycache__/resnet_models.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__pycache__/resnet_models.cpython-38.pyc -------------------------------------------------------------------------------- /models/backbones/__pycache__/resnet_models.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VCISwang/RC-Change-Detection/16a83e709e77f563a8f710b1d7be1f5123c469a5/models/backbones/__pycache__/resnet_models.cpython-39.pyc -------------------------------------------------------------------------------- /models/backbones/get_pretrained_model.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | FILENAME="models/backbones/pretrained/3x3resnet50-imagenet.pth" 4 | 5 | mkdir -p models/backbones/pretrained 6 | wget https://github.com/yassouali/CCT/releases/download/v0.1/3x3resnet50-imagenet.pth -O $FILENAME 7 | -------------------------------------------------------------------------------- /models/backbones/module_helper.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding:utf-8 -*- 3 | # Author: Donny You (youansheng@gmail.com) 4 | 5 | 6 | import os 7 | import torch 8 | import torch.nn as nn 9 | import torch.nn.functional as F 10 | 11 | try: 12 | from urllib import urlretrieve 13 | except ImportError: 14 | from urllib.request import urlretrieve 15 | 16 | class FixedBatchNorm(nn.BatchNorm2d): 17 | def forward(self, input): 18 | return F.batch_norm(input, self.running_mean, self.running_var, self.weight, self.bias, training=False, eps=self.eps) 19 | 20 | class ModuleHelper(object): 21 | 22 | @staticmethod 23 | def BNReLU(num_features, norm_type=None, **kwargs): 24 | if norm_type == 'batchnorm': 25 | return nn.Sequential( 26 | nn.BatchNorm2d(num_features, **kwargs), 27 | nn.ReLU() 28 | ) 29 | elif norm_type == 'encsync_batchnorm': 30 | from encoding.nn import BatchNorm2d 31 | return nn.Sequential( 32 | BatchNorm2d(num_features, **kwargs), 33 | nn.ReLU() 34 | ) 35 | elif norm_type == 'instancenorm': 36 | return nn.Sequential( 37 | nn.InstanceNorm2d(num_features, **kwargs), 38 | nn.ReLU() 39 | ) 40 | elif norm_type == 'fixed_batchnorm': 41 | return nn.Sequential( 42 | FixedBatchNorm(num_features, **kwargs), 43 | nn.ReLU() 44 | ) 45 | else: 46 | raise ValueError('Not support BN type: {}.'.format(norm_type)) 47 | 48 | @staticmethod 49 | def BatchNorm3d(norm_type=None, ret_cls=False): 50 | if norm_type == 'batchnorm': 51 | return nn.BatchNorm3d 52 | elif norm_type == 'encsync_batchnorm': 53 | from encoding.nn import BatchNorm3d 54 | return BatchNorm3d 55 | elif norm_type == 'instancenorm': 56 | return nn.InstanceNorm3d 57 | else: 58 | raise ValueError('Not support BN type: {}.'.format(norm_type)) 59 | 60 | @staticmethod 61 | def BatchNorm2d(norm_type=None, ret_cls=False): 62 | if norm_type == 'batchnorm': 63 | return nn.BatchNorm2d 64 | elif norm_type == 'encsync_batchnorm': 65 | from encoding.nn import BatchNorm2d 66 | return BatchNorm2d 67 | 68 | elif norm_type == 'instancenorm': 69 | return nn.InstanceNorm2d 70 | else: 71 | raise ValueError('Not support BN type: {}.'.format(norm_type)) 72 | 73 | @staticmethod 74 | def BatchNorm1d(norm_type=None, ret_cls=False): 75 | if norm_type == 'batchnorm': 76 | return nn.BatchNorm1d 77 | elif norm_type == 'encsync_batchnorm': 78 | from encoding.nn import BatchNorm1d 79 | return BatchNorm1d 80 | elif norm_type == 'instancenorm': 81 | return nn.InstanceNorm1d 82 | else: 83 | raise ValueError('Not support BN type: {}.'.format(norm_type)) 84 | 85 | @staticmethod 86 | def load_model(model, pretrained=None, all_match=True, map_location='cpu'): 87 | if pretrained is None: 88 | return model 89 | 90 | if not os.path.exists(pretrained): 91 | print('{} not exists.'.format(pretrained)) 92 | return model 93 | 94 | print('Loading pretrained model:{}'.format(pretrained)) 95 | if all_match: 96 | pretrained_dict = torch.load(pretrained, map_location=map_location) 97 | model_dict = model.state_dict() 98 | load_dict = dict() 99 | for k, v in pretrained_dict.items(): 100 | if 'prefix.{}'.format(k) in model_dict: 101 | load_dict['prefix.{}'.format(k)] = v 102 | else: 103 | load_dict[k] = v 104 | model.load_state_dict(load_dict) 105 | 106 | else: 107 | pretrained_dict = torch.load(pretrained) 108 | model_dict = model.state_dict() 109 | load_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict} 110 | print('Matched Keys: {}'.format(load_dict.keys())) 111 | model_dict.update(load_dict) 112 | model.load_state_dict(model_dict) 113 | 114 | return model 115 | 116 | @staticmethod 117 | def load_url(url, map_location=None): 118 | model_dir = os.path.join('~', '.TorchCV', 'model') 119 | if not os.path.exists(model_dir): 120 | os.makedirs(model_dir) 121 | 122 | filename = url.split('/')[-1] 123 | cached_file = os.path.join(model_dir, filename) 124 | if not os.path.exists(cached_file): 125 | print('Downloading: "{}" to {}\n'.format(url, cached_file)) 126 | urlretrieve(url, cached_file) 127 | 128 | print('Loading pretrained model:{}'.format(cached_file)) 129 | return torch.load(cached_file, map_location=map_location) 130 | 131 | @staticmethod 132 | def constant_init(module, val, bias=0): 133 | nn.init.constant_(module.weight, val) 134 | if hasattr(module, 'bias') and module.bias is not None: 135 | nn.init.constant_(module.bias, bias) 136 | 137 | @staticmethod 138 | def xavier_init(module, gain=1, bias=0, distribution='normal'): 139 | assert distribution in ['uniform', 'normal'] 140 | if distribution == 'uniform': 141 | nn.init.xavier_uniform_(module.weight, gain=gain) 142 | else: 143 | nn.init.xavier_normal_(module.weight, gain=gain) 144 | if hasattr(module, 'bias') and module.bias is not None: 145 | nn.init.constant_(module.bias, bias) 146 | 147 | @staticmethod 148 | def normal_init(module, mean=0, std=1, bias=0): 149 | nn.init.normal_(module.weight, mean, std) 150 | if hasattr(module, 'bias') and module.bias is not None: 151 | nn.init.constant_(module.bias, bias) 152 | 153 | @staticmethod 154 | def uniform_init(module, a=0, b=1, bias=0): 155 | nn.init.uniform_(module.weight, a, b) 156 | if hasattr(module, 'bias') and module.bias is not None: 157 | nn.init.constant_(module.bias, bias) 158 | 159 | @staticmethod 160 | def kaiming_init(module, 161 | mode='fan_in', 162 | nonlinearity='leaky_relu', 163 | bias=0, 164 | distribution='normal'): 165 | assert distribution in ['uniform', 'normal'] 166 | if distribution == 'uniform': 167 | nn.init.kaiming_uniform_( 168 | module.weight, mode=mode, nonlinearity=nonlinearity) 169 | else: 170 | nn.init.kaiming_normal_( 171 | module.weight, mode=mode, nonlinearity=nonlinearity) 172 | if hasattr(module, 'bias') and module.bias is not None: 173 | nn.init.constant_(module.bias, bias) 174 | 175 | -------------------------------------------------------------------------------- /models/backbones/resnet_backbone.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding:utf-8 -*- 3 | # Author: Donny You(youansheng@gmail.com) 4 | 5 | 6 | import torch.nn as nn 7 | from models.backbones.resnet_models import * 8 | 9 | 10 | class NormalResnetBackbone(nn.Module): 11 | def __init__(self, orig_resnet): 12 | super(NormalResnetBackbone, self).__init__() 13 | 14 | self.num_features = 2048 15 | # take pretrained resnet, except AvgPool and FC 16 | self.prefix = orig_resnet.prefix 17 | self.maxpool = orig_resnet.maxpool 18 | self.layer1 = orig_resnet.layer1 19 | self.layer2 = orig_resnet.layer2 20 | self.layer3 = orig_resnet.layer3 21 | self.layer4 = orig_resnet.layer4 22 | 23 | def get_num_features(self): 24 | return self.num_features 25 | 26 | def forward(self, x): 27 | tuple_features = list() 28 | x = self.prefix(x) 29 | x = self.maxpool(x) 30 | x = self.layer1(x) 31 | tuple_features.append(x) 32 | x = self.layer2(x) 33 | tuple_features.append(x) 34 | x = self.layer3(x) 35 | tuple_features.append(x) 36 | x = self.layer4(x) 37 | tuple_features.append(x) 38 | 39 | return tuple_features 40 | 41 | 42 | class DilatedResnetBackbone(nn.Module): 43 | def __init__(self, orig_resnet, dilate_scale=8, multi_grid=(1, 2, 4)): 44 | super(DilatedResnetBackbone, self).__init__() 45 | 46 | self.num_features = 2048 47 | from functools import partial 48 | 49 | if dilate_scale == 8: 50 | orig_resnet.layer3.apply(partial(self._nostride_dilate, dilate=2)) 51 | if multi_grid is None: 52 | orig_resnet.layer4.apply(partial(self._nostride_dilate, dilate=4)) 53 | else: 54 | for i, r in enumerate(multi_grid): 55 | orig_resnet.layer4[i].apply(partial(self._nostride_dilate, dilate=int(4 * r))) 56 | 57 | elif dilate_scale == 16: 58 | if multi_grid is None: 59 | orig_resnet.layer4.apply(partial(self._nostride_dilate, dilate=2)) 60 | else: 61 | for i, r in enumerate(multi_grid): 62 | orig_resnet.layer4[i].apply(partial(self._nostride_dilate, dilate=int(2 * r))) 63 | 64 | # Take pretrained resnet, except AvgPool and FC 65 | self.prefix = orig_resnet.prefix 66 | self.maxpool = orig_resnet.maxpool 67 | self.layer1 = orig_resnet.layer1 68 | self.layer2 = orig_resnet.layer2 69 | self.layer3 = orig_resnet.layer3 70 | self.layer4 = orig_resnet.layer4 71 | 72 | def _nostride_dilate(self, m, dilate): 73 | classname = m.__class__.__name__ 74 | if classname.find('Conv') != -1: 75 | # the convolution with stride 76 | if m.stride == (2, 2): 77 | m.stride = (1, 1) 78 | if m.kernel_size == (3, 3): 79 | m.dilation = (dilate // 2, dilate // 2) 80 | m.padding = (dilate // 2, dilate // 2) 81 | # other convoluions 82 | else: 83 | if m.kernel_size == (3, 3): 84 | m.dilation = (dilate, dilate) 85 | m.padding = (dilate, dilate) 86 | 87 | def get_num_features(self): 88 | return self.num_features 89 | 90 | def forward(self, x): 91 | tuple_features = list() 92 | x = self.prefix(x) 93 | x = self.maxpool(x) 94 | 95 | x = self.layer1(x) 96 | tuple_features.append(x) 97 | x = self.layer2(x) 98 | tuple_features.append(x) 99 | x = self.layer3(x) 100 | tuple_features.append(x) 101 | x = self.layer4(x) 102 | tuple_features.append(x) 103 | 104 | return tuple_features 105 | 106 | 107 | def ResNetBackbone(backbone=None, pretrained=None, multi_grid=None, norm_type='batchnorm'): 108 | arch = backbone 109 | if arch == 'resnet34': 110 | orig_resnet = resnet34(pretrained=pretrained) 111 | arch_net = NormalResnetBackbone(orig_resnet) 112 | arch_net.num_features = 512 113 | 114 | elif arch == 'resnet34_dilated8': 115 | orig_resnet = resnet34(pretrained=pretrained) 116 | arch_net = DilatedResnetBackbone(orig_resnet, dilate_scale=8, multi_grid=multi_grid) 117 | arch_net.num_features = 512 118 | 119 | elif arch == 'resnet34_dilated16': 120 | orig_resnet = resnet34(pretrained=pretrained) 121 | arch_net = DilatedResnetBackbone(orig_resnet, dilate_scale=16, multi_grid=multi_grid) 122 | arch_net.num_features = 512 123 | 124 | elif arch == 'resnet50': 125 | orig_resnet = resnet50(pretrained=pretrained) 126 | arch_net = NormalResnetBackbone(orig_resnet) 127 | 128 | elif arch == 'resnet50_dilated8': 129 | orig_resnet = resnet50(pretrained=pretrained) 130 | arch_net = DilatedResnetBackbone(orig_resnet, dilate_scale=8, multi_grid=multi_grid) 131 | 132 | elif arch == 'resnet50_dilated16': 133 | orig_resnet = resnet50(pretrained=pretrained) 134 | arch_net = DilatedResnetBackbone(orig_resnet, dilate_scale=16, multi_grid=multi_grid) 135 | 136 | elif arch == 'deepbase_resnet50': 137 | if pretrained: 138 | pretrained = 'models/backbones/pretrained/3x3resnet50-imagenet.pth' 139 | orig_resnet = deepbase_resnet50(pretrained=pretrained) 140 | arch_net = NormalResnetBackbone(orig_resnet) 141 | 142 | elif arch == 'deepbase_resnet50_dilated8': 143 | if pretrained: 144 | pretrained = 'models/backbones/pretrained/3x3resnet50-imagenet.pth' 145 | orig_resnet = deepbase_resnet50(pretrained=pretrained) 146 | arch_net = DilatedResnetBackbone(orig_resnet, dilate_scale=8, multi_grid=multi_grid) 147 | 148 | elif arch == 'deepbase_resnet50_dilated16': 149 | orig_resnet = deepbase_resnet50(pretrained=pretrained) 150 | arch_net = DilatedResnetBackbone(orig_resnet, dilate_scale=16, multi_grid=multi_grid) 151 | 152 | elif arch == 'resnet101': 153 | orig_resnet = resnet101(pretrained=pretrained) 154 | arch_net = NormalResnetBackbone(orig_resnet) 155 | 156 | elif arch == 'resnet101_dilated8': 157 | orig_resnet = resnet101(pretrained=pretrained) 158 | arch_net = DilatedResnetBackbone(orig_resnet, dilate_scale=8, multi_grid=multi_grid) 159 | 160 | elif arch == 'resnet101_dilated16': 161 | orig_resnet = resnet101(pretrained=pretrained) 162 | arch_net = DilatedResnetBackbone(orig_resnet, dilate_scale=16, multi_grid=multi_grid) 163 | 164 | elif arch == 'deepbase_resnet101': 165 | orig_resnet = deepbase_resnet101(pretrained=pretrained) 166 | arch_net = NormalResnetBackbone(orig_resnet) 167 | 168 | elif arch == 'deepbase_resnet101_dilated8': 169 | if pretrained: 170 | pretrained = 'models/backbones/pretrained/3x3resnet101-imagenet.pth' 171 | orig_resnet = deepbase_resnet101(pretrained=pretrained) 172 | arch_net = DilatedResnetBackbone(orig_resnet, dilate_scale=8, multi_grid=multi_grid) 173 | 174 | elif arch == 'deepbase_resnet101_dilated16': 175 | orig_resnet = deepbase_resnet101(pretrained=pretrained) 176 | arch_net = DilatedResnetBackbone(orig_resnet, dilate_scale=16, multi_grid=multi_grid) 177 | 178 | else: 179 | raise Exception('Architecture undefined!') 180 | 181 | return arch_net 182 | -------------------------------------------------------------------------------- /models/backbones/resnet_models.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding:utf-8 -*- 3 | # Author: Donny You(youansheng@gmail.com) 4 | 5 | 6 | import math 7 | import torch.nn as nn 8 | from collections import OrderedDict 9 | 10 | from models.backbones.module_helper import ModuleHelper 11 | 12 | 13 | model_urls = { 14 | 'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth', 15 | 'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth', 16 | 'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth', 17 | 'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth', 18 | 'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth', 19 | } 20 | 21 | 22 | def conv3x3(in_planes, out_planes, stride=1): 23 | "3x3 convolution with padding" 24 | return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, 25 | padding=1, bias=False) 26 | 27 | 28 | class BasicBlock(nn.Module): 29 | expansion = 1 30 | 31 | def __init__(self, inplanes, planes, stride=1, downsample=None, norm_type=None): 32 | super(BasicBlock, self).__init__() 33 | self.conv1 = conv3x3(inplanes, planes, stride) 34 | self.bn1 = ModuleHelper.BatchNorm2d(norm_type=norm_type)(planes) 35 | self.relu = nn.ReLU(inplace=True) 36 | self.conv2 = conv3x3(planes, planes) 37 | self.bn2 = ModuleHelper.BatchNorm2d(norm_type=norm_type)(planes) 38 | self.downsample = downsample 39 | self.stride = stride 40 | 41 | def forward(self, x): 42 | residual = x 43 | 44 | out = self.conv1(x) 45 | out = self.bn1(out) 46 | out = self.relu(out) 47 | 48 | out = self.conv2(out) 49 | out = self.bn2(out) 50 | 51 | if self.downsample is not None: 52 | residual = self.downsample(x) 53 | 54 | out += residual 55 | out = self.relu(out) 56 | 57 | return out 58 | 59 | 60 | class Bottleneck(nn.Module): 61 | expansion = 4 62 | 63 | def __init__(self, inplanes, planes, stride=1, downsample=None, norm_type=None): 64 | super(Bottleneck, self).__init__() 65 | self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) 66 | self.bn1 = ModuleHelper.BatchNorm2d(norm_type=norm_type)(planes) 67 | self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, 68 | padding=1, bias=False) 69 | self.bn2 = ModuleHelper.BatchNorm2d(norm_type=norm_type)(planes) 70 | self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False) 71 | self.bn3 = ModuleHelper.BatchNorm2d(norm_type=norm_type)(planes * 4) 72 | self.relu = nn.ReLU(inplace=True) 73 | self.downsample = downsample 74 | self.stride = stride 75 | 76 | def forward(self, x): 77 | residual = x 78 | 79 | out = self.conv1(x) 80 | out = self.bn1(out) 81 | out = self.relu(out) 82 | 83 | out = self.conv2(out) 84 | out = self.bn2(out) 85 | out = self.relu(out) 86 | 87 | out = self.conv3(out) 88 | out = self.bn3(out) 89 | 90 | if self.downsample is not None: 91 | residual = self.downsample(x) 92 | 93 | out += residual 94 | out = self.relu(out) 95 | 96 | return out 97 | 98 | 99 | class ResNet(nn.Module): 100 | 101 | def __init__(self, block, layers, num_classes=1000, deep_base=False, norm_type=None): 102 | super(ResNet, self).__init__() 103 | self.inplanes = 128 if deep_base else 64 104 | if deep_base: 105 | self.prefix = nn.Sequential(OrderedDict([ 106 | ('conv1', nn.Conv2d(3, 64, kernel_size=3, stride=2, padding=1, bias=False)), 107 | ('bn1', ModuleHelper.BatchNorm2d(norm_type=norm_type)(64)), 108 | ('relu1', nn.ReLU(inplace=False)), 109 | ('conv2', nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1, bias=False)), 110 | ('bn2', ModuleHelper.BatchNorm2d(norm_type=norm_type)(64)), 111 | ('relu2', nn.ReLU(inplace=False)), 112 | ('conv3', nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1, bias=False)), 113 | ('bn3', ModuleHelper.BatchNorm2d(norm_type=norm_type)(self.inplanes)), 114 | ('relu3', nn.ReLU(inplace=False))] 115 | )) 116 | else: 117 | self.prefix = nn.Sequential(OrderedDict([ 118 | ('conv1', nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)), 119 | ('bn1', ModuleHelper.BatchNorm2d(norm_type=norm_type)(self.inplanes)), 120 | ('relu', nn.ReLU(inplace=False))] 121 | )) 122 | 123 | self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1, ceil_mode=False) # change. 124 | 125 | self.layer1 = self._make_layer(block, 64, layers[0], norm_type=norm_type) 126 | self.layer2 = self._make_layer(block, 128, layers[1], stride=2, norm_type=norm_type) 127 | self.layer3 = self._make_layer(block, 256, layers[2], stride=2, norm_type=norm_type) 128 | self.layer4 = self._make_layer(block, 512, layers[3], stride=2, norm_type=norm_type) 129 | self.avgpool = nn.AvgPool2d(7, stride=1) 130 | self.fc = nn.Linear(512 * block.expansion, num_classes) 131 | 132 | for m in self.modules(): 133 | if isinstance(m, nn.Conv2d): 134 | n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels 135 | m.weight.data.normal_(0, math.sqrt(2. / n)) 136 | elif isinstance(m, ModuleHelper.BatchNorm2d(norm_type=norm_type, ret_cls=True)): 137 | m.weight.data.fill_(1) 138 | m.bias.data.zero_() 139 | 140 | def _make_layer(self, block, planes, blocks, stride=1, norm_type=None): 141 | downsample = None 142 | if stride != 1 or self.inplanes != planes * block.expansion: 143 | downsample = nn.Sequential( 144 | nn.Conv2d(self.inplanes, planes * block.expansion, 145 | kernel_size=1, stride=stride, bias=False), 146 | ModuleHelper.BatchNorm2d(norm_type=norm_type)(planes * block.expansion), 147 | ) 148 | 149 | layers = [] 150 | layers.append(block(self.inplanes, planes, stride, downsample, norm_type=norm_type)) 151 | self.inplanes = planes * block.expansion 152 | for i in range(1, blocks): 153 | layers.append(block(self.inplanes, planes, norm_type=norm_type)) 154 | 155 | return nn.Sequential(*layers) 156 | 157 | def forward(self, x): 158 | x = self.prefix(x) 159 | x = self.maxpool(x) 160 | 161 | x = self.layer1(x) 162 | x = self.layer2(x) 163 | x = self.layer3(x) 164 | x = self.layer4(x) 165 | 166 | x = self.avgpool(x) 167 | x = x.view(x.size(0), -1) 168 | x = self.fc(x) 169 | 170 | return x 171 | 172 | 173 | def resnet18(num_classes=1000, pretrained=None, norm_type='batchnorm', **kwargs): 174 | """Constructs a ResNet-18 model. 175 | Args: 176 | pretrained (bool): If True, returns a model pre-trained on Places 177 | norm_type (str): choose norm type 178 | """ 179 | model = ResNet(BasicBlock, [2, 2, 2, 2], num_classes=num_classes, deep_base=False, norm_type=norm_type) 180 | model = ModuleHelper.load_model(model, pretrained=pretrained) 181 | return model 182 | 183 | def deepbase_resnet18(num_classes=1000, pretrained=None, norm_type='batchnorm', **kwargs): 184 | """Constructs a ResNet-18 model. 185 | Args: 186 | pretrained (bool): If True, returns a model pre-trained on Places 187 | """ 188 | model = ResNet(BasicBlock, [2, 2, 2, 2], num_classes=num_classes, deep_base=True, norm_type=norm_type) 189 | model = ModuleHelper.load_model(model, pretrained=pretrained) 190 | return model 191 | 192 | def resnet34(num_classes=1000, pretrained=None, norm_type='batchnorm', **kwargs): 193 | """Constructs a ResNet-34 model. 194 | Args: 195 | pretrained (bool): If True, returns a model pre-trained on Places 196 | """ 197 | model = ResNet(BasicBlock, [3, 4, 6, 3], num_classes=num_classes, deep_base=False, norm_type=norm_type) 198 | model = ModuleHelper.load_model(model, pretrained=pretrained) 199 | return model 200 | 201 | def deepbase_resnet34(num_classes=1000, pretrained=None, norm_type='batchnorm', **kwargs): 202 | """Constructs a ResNet-34 model. 203 | Args: 204 | pretrained (bool): If True, returns a model pre-trained on Places 205 | """ 206 | model = ResNet(BasicBlock, [3, 4, 6, 3], num_classes=num_classes, deep_base=True, norm_type=norm_type) 207 | model = ModuleHelper.load_model(model, pretrained=pretrained) 208 | return model 209 | 210 | def resnet50(num_classes=1000, pretrained=None, norm_type='batchnorm', **kwargs): 211 | """Constructs a ResNet-50 model. 212 | Args: 213 | pretrained (bool): If True, returns a model pre-trained on Places 214 | """ 215 | model = ResNet(Bottleneck, [3, 4, 6, 3], num_classes=num_classes, deep_base=False, norm_type=norm_type) 216 | model = ModuleHelper.load_model(model, pretrained=pretrained) 217 | return model 218 | 219 | def deepbase_resnet50(num_classes=1000, pretrained=None, norm_type='batchnorm', **kwargs): 220 | """Constructs a ResNet-50 model. 221 | Args: 222 | pretrained (bool): If True, returns a model pre-trained on Places 223 | """ 224 | model = ResNet(Bottleneck, [3, 4, 6, 3], num_classes=num_classes, deep_base=True, norm_type=norm_type) 225 | model = ModuleHelper.load_model(model, pretrained=pretrained) 226 | return model 227 | 228 | def resnet101(num_classes=1000, pretrained=None, norm_type='batchnorm', **kwargs): 229 | """Constructs a ResNet-101 model. 230 | Args: 231 | pretrained (bool): If True, returns a model pre-trained on Places 232 | """ 233 | model = ResNet(Bottleneck, [3, 4, 23, 3], num_classes=num_classes, deep_base=False, norm_type=norm_type) 234 | model = ModuleHelper.load_model(model, pretrained=pretrained) 235 | return model 236 | 237 | def deepbase_resnet101(num_classes=1000, pretrained=None, norm_type='batchnorm', **kwargs): 238 | """Constructs a ResNet-101 model. 239 | Args: 240 | pretrained (bool): If True, returns a model pre-trained on Places 241 | """ 242 | model = ResNet(Bottleneck, [3, 4, 23, 3], num_classes=num_classes, deep_base=True, norm_type=norm_type) 243 | model = ModuleHelper.load_model(model, pretrained=pretrained) 244 | return model 245 | 246 | def resnet152(num_classes=1000, pretrained=None, norm_type='batchnorm', **kwargs): 247 | """Constructs a ResNet-152 model. 248 | 249 | Args: 250 | pretrained (bool): If True, returns a model pre-trained on Places 251 | """ 252 | model = ResNet(Bottleneck, [3, 8, 36, 3], num_classes=num_classes, deep_base=False, norm_type=norm_type) 253 | model = ModuleHelper.load_model(model, pretrained=pretrained) 254 | return model 255 | 256 | def deepbase_resnet152(num_classes=1000, pretrained=None, norm_type='batchnorm', **kwargs): 257 | """Constructs a ResNet-152 model. 258 | 259 | Args: 260 | pretrained (bool): If True, returns a model pre-trained on Places 261 | """ 262 | model = ResNet(Bottleneck, [3, 8, 36, 3], num_classes=num_classes, deep_base=True, norm_type=norm_type) 263 | model = ModuleHelper.load_model(model, pretrained=pretrained) 264 | return model 265 | -------------------------------------------------------------------------------- /models/base_model.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import torch.nn as nn 3 | import numpy as np 4 | 5 | class BaseModel(nn.Module): 6 | def __init__(self): 7 | super(BaseModel, self).__init__() 8 | self.logger = logging.getLogger(self.__class__.__name__) 9 | 10 | def forward(self): 11 | raise NotImplementedError 12 | 13 | def summary(self): 14 | model_parameters = filter(lambda p: p.requires_grad, self.parameters()) 15 | nbr_params = sum([np.prod(p.size()) for p in model_parameters]) 16 | self.logger.info(f'Nbr of trainable parameters: {nbr_params}') 17 | 18 | def __str__(self): 19 | model_parameters = filter(lambda p: p.requires_grad, self.parameters()) 20 | nbr_params = int(sum([np.prod(p.size()) for p in model_parameters])) 21 | return f'\nNbr of trainable parameters: {nbr_params}' 22 | #return super(BaseModel, self).__str__() + f'\nNbr of trainable parameters: {nbr_params}' 23 | -------------------------------------------------------------------------------- /models/decoders.py: -------------------------------------------------------------------------------- 1 | import math , time 2 | import torch 3 | import torch.nn.functional as F 4 | from torch import nn 5 | from itertools import chain 6 | import contextlib 7 | import random 8 | import numpy as np 9 | import cv2 10 | from torch.distributions.uniform import Uniform 11 | 12 | 13 | def icnr(x, scale=2, init=nn.init.kaiming_normal_): 14 | """ 15 | Checkerboard artifact free sub-pixel convolution 16 | https://arxiv.org/abs/1707.02937 17 | """ 18 | ni,nf,h,w = x.shape 19 | ni2 = int(ni/(scale**2)) 20 | k = init(torch.zeros([ni2,nf,h,w])).transpose(0, 1) 21 | k = k.contiguous().view(ni2, nf, -1) 22 | k = k.repeat(1, 1, scale**2) 23 | k = k.contiguous().view([nf,ni,h,w]).transpose(0, 1) 24 | x.data.copy_(k) 25 | 26 | 27 | class PixelShuffle(nn.Module): 28 | """ 29 | Real-Time Single Image and Video Super-Resolution 30 | https://arxiv.org/abs/1609.05158 31 | """ 32 | def __init__(self, n_channels, scale): 33 | super(PixelShuffle, self).__init__() 34 | self.conv = nn.Conv2d(n_channels, n_channels*(scale**2), kernel_size=1) 35 | icnr(self.conv.weight) 36 | self.shuf = nn.PixelShuffle(scale) 37 | self.relu = nn.ReLU(inplace=True) 38 | 39 | def forward(self,x): 40 | x = self.shuf(self.relu(self.conv(x))) 41 | return x 42 | 43 | 44 | # def upsample(in_channels, out_channels, upscale, kernel_size=3): 45 | # # A series of x 2 upsamling until we get to the upscale we want 46 | # layers = [] 47 | # conv1x1 = nn.Conv2d(in_channels, out_channels, kernel_size=1, bias=False) 48 | # nn.init.kaiming_normal_(conv1x1.weight.data, nonlinearity='relu') 49 | # layers.append(conv1x1) 50 | # for i in range(int(math.log(upscale, 2))): 51 | # layers.append(PixelShuffle(out_channels, scale=2)) 52 | # return nn.Sequential(*layers) 53 | 54 | def upsample(in_channels, out_channels, upscale, kernel_size=3): 55 | layers = [] 56 | 57 | # Middle channels 58 | mid_channels = 32 59 | 60 | # First conv layer to reduce number of channels 61 | diff_conv1x1 = nn.Conv2d(in_channels, mid_channels, kernel_size=kernel_size, padding=1, bias=False) 62 | nn.init.kaiming_normal_(diff_conv1x1.weight.data, nonlinearity='relu') 63 | layers.append(diff_conv1x1) 64 | 65 | # ReLU 66 | diff_relu = nn.ReLU() 67 | layers.append(diff_relu) 68 | 69 | # Upsampling to original size 70 | up = nn.Upsample(scale_factor=upscale, mode='bilinear') 71 | layers.append(up) 72 | 73 | # Classification layer 74 | conv1x1 = nn.Conv2d(mid_channels, out_channels, kernel_size=1, bias=False) 75 | nn.init.kaiming_normal_(conv1x1.weight.data, nonlinearity='relu') 76 | layers.append(conv1x1) 77 | 78 | return nn.Sequential(*layers) 79 | 80 | 81 | class MainDecoder(nn.Module): 82 | def __init__(self, upscale, conv_in_ch, num_classes): 83 | super(MainDecoder, self).__init__() 84 | self.upsample = upsample(conv_in_ch, num_classes, upscale=upscale) 85 | 86 | def forward(self, x): 87 | x = self.upsample(x) 88 | return x 89 | 90 | 91 | class DropOutDecoder(nn.Module): 92 | def __init__(self, upscale, conv_in_ch, num_classes, drop_rate=0.3, spatial_dropout=True): 93 | super(DropOutDecoder, self).__init__() 94 | self.dropout = nn.Dropout2d(p=drop_rate) if spatial_dropout else nn.Dropout(drop_rate) 95 | self.upsample = upsample(conv_in_ch, num_classes, upscale=upscale) 96 | 97 | def forward(self, x, _, pertub=True): 98 | if pertub: 99 | x = self.upsample(self.dropout(x)) 100 | else: 101 | x = self.upsample(x) 102 | return x 103 | 104 | 105 | class FeatureDropDecoder(nn.Module): 106 | def __init__(self, upscale, conv_in_ch, num_classes): 107 | super(FeatureDropDecoder, self).__init__() 108 | self.upsample = upsample(conv_in_ch, num_classes, upscale=upscale) 109 | 110 | def feature_dropout(self, x): 111 | attention = torch.mean(x, dim=1, keepdim=True) 112 | max_val, _ = torch.max(attention.view(x.size(0), -1), dim=1, keepdim=True) 113 | threshold = max_val * np.random.uniform(0.7, 0.9) 114 | threshold = threshold.view(x.size(0), 1, 1, 1).expand_as(attention) 115 | drop_mask = (attention < threshold).float() 116 | return x.mul(drop_mask) 117 | 118 | def forward(self, x, _, pertub=True): 119 | if pertub: 120 | x = self.feature_dropout(x) 121 | x = self.upsample(x) 122 | else: 123 | x = self.upsample(x) 124 | return x 125 | 126 | 127 | class FeatureNoiseDecoder(nn.Module): 128 | def __init__(self, upscale, conv_in_ch, num_classes, uniform_range=0.3): 129 | super(FeatureNoiseDecoder, self).__init__() 130 | self.upsample = upsample(conv_in_ch, num_classes, upscale=upscale) 131 | self.uni_dist = Uniform(-uniform_range, uniform_range) 132 | 133 | def feature_based_noise(self, x): 134 | noise_vector = self.uni_dist.sample(x.shape[1:]).to(x.device).unsqueeze(0) 135 | x_noise = x.mul(noise_vector) + x 136 | return x_noise 137 | 138 | def forward(self, x, _, pertub=True): 139 | if pertub: 140 | x = self.feature_based_noise(x) 141 | x = self.upsample(x) 142 | else: 143 | x = self.upsample(x) 144 | return x 145 | 146 | 147 | def _l2_normalize(d): 148 | # Normalizing per batch axis 149 | d_reshaped = d.view(d.shape[0], -1, *(1 for _ in range(d.dim() - 2))) 150 | d /= torch.norm(d_reshaped, dim=1, keepdim=True) + 1e-8 151 | return d 152 | 153 | 154 | def get_r_adv(x, decoder, it=1, xi=1e-1, eps=10.0): 155 | """ 156 | Virtual Adversarial Training 157 | https://arxiv.org/abs/1704.03976 158 | """ 159 | x_detached = x.detach() 160 | with torch.no_grad(): 161 | pred = F.softmax(decoder(x_detached), dim=1) 162 | 163 | d = torch.rand(x.shape).sub(0.5).to(x.device) 164 | d = _l2_normalize(d) 165 | 166 | for _ in range(it): 167 | d.requires_grad_() 168 | pred_hat = decoder(x_detached + xi * d) 169 | logp_hat = F.log_softmax(pred_hat, dim=1) 170 | adv_distance = F.kl_div(logp_hat, pred, reduction='batchmean') 171 | adv_distance.backward() 172 | d = _l2_normalize(d.grad) 173 | decoder.zero_grad() 174 | 175 | r_adv = d * eps 176 | return r_adv 177 | 178 | 179 | class VATDecoder(nn.Module): 180 | def __init__(self, upscale, conv_in_ch, num_classes, xi=1e-1, eps=10.0, iterations=1): 181 | super(VATDecoder, self).__init__() 182 | self.xi = xi 183 | self.eps = eps 184 | self.it = iterations 185 | self.upsample = upsample(conv_in_ch, num_classes, upscale=upscale) 186 | 187 | def forward(self, x, _, pertub=True): 188 | if pertub: 189 | r_adv = get_r_adv(x, self.upsample, self.it, self.xi, self.eps) 190 | x = self.upsample(x + r_adv) 191 | else: 192 | x = self.upsample(x) 193 | return x 194 | 195 | 196 | def guided_cutout(output, upscale, resize, erase=0.4, use_dropout=False): 197 | if len(output.shape) == 3: 198 | masks = (output > 0).float() 199 | else: 200 | masks = (output.argmax(1) > 0).float() 201 | 202 | if use_dropout: 203 | p_drop = random.randint(3, 6)/10 204 | maskdroped = (F.dropout(masks, p_drop) > 0).float() 205 | maskdroped = maskdroped + (1 - masks) 206 | maskdroped.unsqueeze_(0) 207 | maskdroped = F.interpolate(maskdroped, size=resize, mode='nearest') 208 | 209 | masks_np = [] 210 | for mask in masks: 211 | mask_np = np.uint8(mask.cpu().numpy()) 212 | mask_ones = np.ones_like(mask_np) 213 | try: # Version 3.x 214 | _, contours, _ = cv2.findContours(mask_np, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) 215 | except: # Version 4.x 216 | contours, _ = cv2.findContours(mask_np, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) 217 | 218 | polys = [c.reshape(c.shape[0], c.shape[-1]) for c in contours if c.shape[0] > 50] 219 | for poly in polys: 220 | min_w, max_w = poly[:, 0].min(), poly[:, 0].max() 221 | min_h, max_h = poly[:, 1].min(), poly[:, 1].max() 222 | bb_w, bb_h = max_w-min_w, max_h-min_h 223 | rnd_start_w = random.randint(0, int(bb_w*(1-erase))) 224 | rnd_start_h = random.randint(0, int(bb_h*(1-erase))) 225 | h_start, h_end = min_h+rnd_start_h, min_h+rnd_start_h+int(bb_h*erase) 226 | w_start, w_end = min_w+rnd_start_w, min_w+rnd_start_w+int(bb_w*erase) 227 | mask_ones[h_start:h_end, w_start:w_end] = 0 228 | masks_np.append(mask_ones) 229 | masks_np = np.stack(masks_np) 230 | 231 | maskcut = torch.from_numpy(masks_np).float().unsqueeze_(1) 232 | maskcut = F.interpolate(maskcut, size=resize, mode='nearest') 233 | 234 | if use_dropout: 235 | return maskcut.to(output.device), maskdroped.to(output.device) 236 | return maskcut.to(output.device) 237 | 238 | 239 | class CutOutDecoder(nn.Module): 240 | def __init__(self, upscale, conv_in_ch, num_classes, drop_rate=0.3, spatial_dropout=True, erase=0.4): 241 | super(CutOutDecoder, self).__init__() 242 | self.erase = erase 243 | self.upscale = upscale 244 | self.upsample = upsample(conv_in_ch, num_classes, upscale=upscale) 245 | 246 | def forward(self, x, pred=None, pertub=True): 247 | if pertub: 248 | maskcut = guided_cutout(pred, upscale=self.upscale, erase=self.erase, resize=(x.size(2), x.size(3))) 249 | x = x * maskcut 250 | x = self.upsample(x) 251 | else: 252 | x = self.upsample(x) 253 | return x 254 | 255 | 256 | def guided_masking(x, output, upscale, resize, return_msk_context=True): 257 | if len(output.shape) == 3: 258 | masks_context = (output > 0).float().unsqueeze(1) 259 | else: 260 | masks_context = (output.argmax(1) > 0).float().unsqueeze(1) 261 | 262 | masks_context = F.interpolate(masks_context, size=resize, mode='nearest') 263 | 264 | x_masked_context = masks_context * x 265 | if return_msk_context: 266 | return x_masked_context 267 | 268 | masks_objects = (1 - masks_context) 269 | x_masked_objects = masks_objects * x 270 | return x_masked_objects 271 | 272 | 273 | class ContextMaskingDecoder(nn.Module): 274 | def __init__(self, upscale, conv_in_ch, num_classes): 275 | super(ContextMaskingDecoder, self).__init__() 276 | self.upscale = upscale 277 | self.upsample = upsample(conv_in_ch, num_classes, upscale=upscale) 278 | 279 | def forward(self, x, pred=None, pertub=True): 280 | if pertub: 281 | x_masked_context = guided_masking(x, pred, resize=(x.size(2), x.size(3)), 282 | upscale=self.upscale, return_msk_context=True) 283 | x_masked_context = self.upsample(x_masked_context) 284 | else: 285 | x_masked_context = self.upsample(x) 286 | return x_masked_context 287 | 288 | 289 | class ObjectMaskingDecoder(nn.Module): 290 | def __init__(self, upscale, conv_in_ch, num_classes): 291 | super(ObjectMaskingDecoder, self).__init__() 292 | self.upscale = upscale 293 | self.upsample = upsample(conv_in_ch, num_classes, upscale=upscale) 294 | 295 | def forward(self, x, pred=None, pertub=True): 296 | if pertub: 297 | x_masked_obj = guided_masking(x, pred, resize=(x.size(2), x.size(3)), 298 | upscale=self.upscale, return_msk_context=False) 299 | x_masked_obj = self.upsample(x_masked_obj) 300 | else: 301 | x_masked_obj = self.upsample(x) 302 | return x_masked_obj 303 | 304 | -------------------------------------------------------------------------------- /models/encoder.py: -------------------------------------------------------------------------------- 1 | from models.backbones.resnet_backbone import ResNetBackbone 2 | import torch 3 | import torch.nn as nn 4 | import torch.nn.functional as F 5 | import os 6 | 7 | resnet50 = { 8 | "path": "models/backbones/pretrained/3x3resnet50-imagenet.pth", 9 | } 10 | 11 | 12 | class _PSPModule(nn.Module): 13 | def __init__(self, in_channels, bin_sizes): 14 | super(_PSPModule, self).__init__() 15 | 16 | out_channels = in_channels // len(bin_sizes) 17 | self.stages = nn.ModuleList([self._make_stages(in_channels, out_channels, b_s) for b_s in bin_sizes]) 18 | self.bottleneck = nn.Sequential( 19 | nn.Conv2d(in_channels+(out_channels * len(bin_sizes)), out_channels, 20 | kernel_size=3, padding=1, bias=False), 21 | nn.BatchNorm2d(out_channels), 22 | nn.ReLU(inplace=True) 23 | ) 24 | 25 | def _make_stages(self, in_channels, out_channels, bin_sz): 26 | prior = nn.AdaptiveAvgPool2d(output_size=bin_sz) 27 | conv = nn.Conv2d(in_channels, out_channels, kernel_size=1, bias=False) 28 | bn = nn.BatchNorm2d(out_channels) 29 | relu = nn.ReLU(inplace=True) 30 | return nn.Sequential(prior, conv, bn, relu) 31 | 32 | def forward(self, features): 33 | h, w = features.size()[2], features.size()[3] 34 | pyramids = [features] 35 | pyramids.extend([F.interpolate(stage(features), size=(h, w), mode='bilinear', 36 | align_corners=False) for stage in self.stages]) 37 | output = self.bottleneck(torch.cat(pyramids, dim=1)) 38 | return output 39 | 40 | 41 | class Encoder(nn.Module): 42 | def __init__(self, pretrained): 43 | super(Encoder, self).__init__() 44 | 45 | if pretrained and not os.path.isfile(resnet50["path"]): 46 | print("Downloading pretrained resnet (source : https://github.com/donnyyou/torchcv)") 47 | os.system('sh models/backbones/get_pretrained_model.sh') 48 | 49 | model = ResNetBackbone(backbone='deepbase_resnet50_dilated8', pretrained=pretrained) 50 | self.base = nn.Sequential( 51 | nn.Sequential(model.prefix, model.maxpool), 52 | model.layer1, 53 | model.layer2, 54 | model.layer3, 55 | model.layer4 56 | ) 57 | self.psp = _PSPModule(2048, bin_sizes=[1, 2, 3, 6]) 58 | 59 | def forward(self, A, B): 60 | a = self.base(A) 61 | b = self.base(B) 62 | diff = torch.abs(a-b) 63 | x = self.psp(diff) 64 | return x 65 | 66 | def get_backbone_params(self): 67 | return self.base.parameters() 68 | 69 | def get_module_params(self): 70 | return self.psp.parameters() 71 | --------------------------------------------------------------------------------