├── .gitmodules
├── README.md
└── imagenet_model
    ├── Attention-56-deploy.prototxt
    └── AttentionNeXt-56-deploy.prototxt

/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "caffe"]
2 | 	path = caffe
3 | 	url = https://github.com/buptwangfei/caffe.git
4 | 	branch = master
5 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Residual Attention Network
2 | Residual Attention Network for Image Classification (**CVPR-2017 Spotlight**)
3 | 
4 | By Fei Wang, Mengqing Jiang, Chen Qian, Shuo Yang, Cheng Li, Honggang Zhang, Xiaogang Wang, Xiaoou Tang
5 | 
6 | 
7 | ### Introduction
8 | **Residual Attention Network** is a convolutional neural network built around an attention mechanism that can be incorporated into state-of-the-art feed-forward network architectures and trained in an end-to-end fashion. (A sketch of its mask/trunk fusion is given at the end of this README.)
9 | 
10 | **Residual Attention Networks** are described in the paper "Residual Attention Network for Image Classification" (https://arxiv.org/pdf/1704.06904.pdf).
11 | 
12 | This repository contains the prototxts of the Residual Attention Network. The trained models will be released soon.
13 | 
14 | ### Citation
15 | If you find the Residual Attention Network useful in your research, please cite:
16 | 
17 |     @article{wang2017residual,
18 |       title={Residual Attention Network for Image Classification},
19 |       author={Wang, Fei and Jiang, Mengqing and Qian, Chen and Yang, Shuo and Li, Cheng and Zhang, Honggang and Wang, Xiaogang and Tang, Xiaoou},
20 |       journal={arXiv preprint arXiv:1704.06904},
21 |       year={2017}
22 |     }
23 | 
24 | ### Models
25 | 0. Attention-56 and Attention-92 are based on the pre-activation residual unit.
26 | 
27 | 1. Following the paper, we replace the pre-activation residual unit with the ResNeXt unit to construct AttentionNeXt-56 and AttentionNeXt-92.
28 | 
29 | 
30 | 
31 | ### Main Performance
32 | 
33 | 0. Evaluation on the ImageNet validation dataset.
34 | 
35 | | Network          |Test Size| top-1 err. | top-5 err. |
36 | |------------------|---------|------------|------------|
37 | | Attention-56     | 224\*224| 21.76%     | 5.9%       |
38 | | AttentionNeXt-56 | 224\*224| 21.2%      | 5.6%       |
39 | | Attention-92     | 320\*320| 19.5%      | 4.8%       |
40 | 
41 | 
42 | ### Note
43 | 0. We use Caffe (https://github.com/buptwangfei/caffe) to train our Residual Attention Networks.
44 | 
45 | 1. We follow the BN layer implementation from https://github.com/happynear/caffe-windows.git, which merges the computations of mean, variance, scale, and shift into a single layer (see the sketch below). We use the moving average of the batch statistics in the training stage.
46 | 
47 | 2. Scale augmentation and aspect-ratio augmentation are used in the training process.
48 | 
49 | 3. The mini-batch size per GPU should be at least 32 images.
50 | 
51 | 4. If you want to train a Residual Attention Network, you should use my Caffe code and add the data augmentation described in the paper. It should then be straightforward to reproduce the performance on the ImageNet validation dataset.
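
The mask branch of each attention module ends with a `Sigmoid` layer whose output is fused with the trunk by an `Eltwise` `PROD` followed by an `Eltwise` `SUM` (see, e.g., `"AttentionA_1/mask"`, `"AttentionA_1_residual"`, and `"AttentionA_1/fusion"` in the deploy prototxt below). This is the attention residual learning H(x) = (1 + M(x)) * T(x) from the paper. A minimal NumPy sketch of the fusion, with illustrative names that are not part of the repository:

```python
import numpy as np

def attention_fusion(trunk, mask_logits):
    """Attention residual learning: H = (1 + M) * T.

    Mirrors the prototxt layers for AttentionA_1:
      Sigmoid      -> "AttentionA_1/mask"       (soft mask M in (0, 1))
      Eltwise PROD -> "AttentionA_1_residual"   (M * T)
      Eltwise SUM  -> "AttentionA_1/fusion"     (M * T + T)
    """
    mask = 1.0 / (1.0 + np.exp(-mask_logits))
    return mask * trunk + trunk
```

Because the mask values lie in (0, 1), the identity trunk features are preserved and the mask acts as a soft feature selector on top of them.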
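
The `BN` layers in the prototxts (type `"BN"`, with `frozen: true` at deploy time) merge normalization and the affine scale/shift into one layer, as described in note 1. A sketch of the inference-time computation, assuming the standard batch-norm formulation (the function and parameter names are illustrative, not from the repository):

```python
import numpy as np

def frozen_bn(x, moving_mean, moving_var, scale, shift, eps=1e-5):
    """Inference-time merged BN: y = scale * (x - mean) / sqrt(var + eps) + shift.

    x: (N, C, H, W); the per-channel parameters are reshaped so they broadcast
    over the batch and spatial dimensions. With frozen: true, mean and var are
    the moving averages accumulated during training.
    """
    shape = (1, -1, 1, 1)
    x_hat = (x - moving_mean.reshape(shape)) / np.sqrt(moving_var.reshape(shape) + eps)
    return scale.reshape(shape) * x_hat + shift.reshape(shape)
```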
52 | -------------------------------------------------------------------------------- /imagenet_model/Attention-56-deploy.prototxt: -------------------------------------------------------------------------------- 1 | name: "Attention-56" 2 | input: "data" 3 | input_dim: 1 4 | input_dim: 3 5 | input_dim: 224 6 | input_dim: 224 7 | 8 | layer{ 9 | name: "conv1" 10 | type: "Convolution" 11 | bottom: "data" 12 | top: "conv1" 13 | convolution_param { 14 | num_output: 64 15 | pad: 3 16 | kernel_size: 7 17 | stride: 2 18 | bias_term: false 19 | } 20 | } 21 | 22 | layer{ 23 | name: "conv1/bn" 24 | type: "BN" 25 | bottom: "conv1" 26 | top: "conv1/bn" 27 | bn_param { 28 | frozen: true 29 | } 30 | } 31 | 32 | layer{ 33 | name: "conv1/bn/relu" 34 | type: "ReLU" 35 | bottom: "conv1/bn" 36 | top: "conv1/bn" 37 | } 38 | 39 | layer{ 40 | name: "pool1_3x3_s2" 41 | type: "Pooling" 42 | bottom: "conv1/bn" 43 | top: "pool1_3x3_s2" 44 | pooling_param { 45 | pool: MAX 46 | kernel_size: 3 47 | stride: 2 48 | } 49 | } 50 | 51 | layer{ 52 | name: "pool1_3x3_s2/bn" 53 | type: "BN" 54 | bottom: "pool1_3x3_s2" 55 | top: "pool1_3x3_s2/bn" 56 | bn_param { 57 | frozen: true 58 | } 59 | } 60 | 61 | layer{ 62 | name: "pool1_3x3_s2/bn/relu" 63 | type: "ReLU" 64 | bottom: "pool1_3x3_s2/bn" 65 | top: "pool1_3x3_s2/bn" 66 | } 67 | 68 | layer{ 69 | name: "pre_res_1/branch1/conv1_1x1" 70 | type: "Convolution" 71 | bottom: "pool1_3x3_s2/bn" 72 | top: "pre_res_1/branch1/conv1_1x1" 73 | convolution_param { 74 | num_output: 64 75 | pad: 0 76 | kernel_size: 1 77 | stride: 1 78 | bias_term: false 79 | } 80 | } 81 | 82 | layer{ 83 | name: "pre_res_1/branch1/conv1_1x1/bn" 84 | type: "BN" 85 | bottom: "pre_res_1/branch1/conv1_1x1" 86 | top: "pre_res_1/branch1/conv1_1x1/bn" 87 | bn_param { 88 | frozen: true 89 | } 90 | } 91 | 92 | layer{ 93 | name: "pre_res_1/branch1/conv1_1x1/bn/relu" 94 | type: "ReLU" 95 | bottom: "pre_res_1/branch1/conv1_1x1/bn" 96 | top: "pre_res_1/branch1/conv1_1x1/bn" 97 | } 98 | 99 | layer{ 100 | name: "pre_res_1/branch1/conv2_3x3" 101 | type: "Convolution" 102 | bottom: "pre_res_1/branch1/conv1_1x1/bn" 103 | top: "pre_res_1/branch1/conv2_3x3" 104 | convolution_param { 105 | num_output: 64 106 | pad: 1 107 | kernel_size: 3 108 | stride: 1 109 | bias_term: false 110 | } 111 | } 112 | 113 | layer{ 114 | name: "pre_res_1/branch1/conv2_3x3/bn" 115 | type: "BN" 116 | bottom: "pre_res_1/branch1/conv2_3x3" 117 | top: "pre_res_1/branch1/conv2_3x3/bn" 118 | bn_param { 119 | frozen: true 120 | } 121 | } 122 | 123 | layer{ 124 | name: "pre_res_1/branch1/conv2_3x3/bn/relu" 125 | type: "ReLU" 126 | bottom: "pre_res_1/branch1/conv2_3x3/bn" 127 | top: "pre_res_1/branch1/conv2_3x3/bn" 128 | } 129 | 130 | layer{ 131 | name: "pre_res_1/branch1/conv3_1x1" 132 | type: "Convolution" 133 | bottom: "pre_res_1/branch1/conv2_3x3/bn" 134 | top: "pre_res_1/branch1/conv3_1x1" 135 | convolution_param { 136 | num_output: 256 137 | pad: 0 138 | kernel_size: 1 139 | stride: 1 140 | bias_term: false 141 | } 142 | } 143 | 144 | layer{ 145 | name: "pre_res_1/branch2/conv1_1x1" 146 | type: "Convolution" 147 | bottom: "pool1_3x3_s2/bn" 148 | top: "pre_res_1/branch2/conv1_1x1" 149 | convolution_param { 150 | num_output: 256 151 | pad: 0 152 | kernel_size: 1 153 | stride: 1 154 | bias_term: false 155 | } 156 | } 157 | 158 | layer{ 159 | name: "pre_res_1" 160 | type: "Eltwise" 161 | bottom: "pre_res_1/branch2/conv1_1x1" 162 | bottom: "pre_res_1/branch1/conv3_1x1" 163 | top: "pre_res_1" 164 | eltwise_param { 165 | operation: SUM 166 | } 167 | } 168 | 169 
| layer{ 170 | name: "pre_res_1/bn" 171 | type: "BN" 172 | bottom: "pre_res_1" 173 | top: "pre_res_1/bn" 174 | bn_param { 175 | frozen: true 176 | } 177 | } 178 | 179 | layer{ 180 | name: "pre_res_1/bn/relu" 181 | type: "ReLU" 182 | bottom: "pre_res_1/bn" 183 | top: "pre_res_1/bn" 184 | } 185 | 186 | layer{ 187 | name: "AttentionA_1/trunk/res1/branch1/conv1_1x1" 188 | type: "Convolution" 189 | bottom: "pre_res_1/bn" 190 | top: "AttentionA_1/trunk/res1/branch1/conv1_1x1" 191 | convolution_param { 192 | num_output: 64 193 | pad: 0 194 | kernel_size: 1 195 | stride: 1 196 | bias_term: false 197 | } 198 | } 199 | 200 | layer{ 201 | name: "AttentionA_1/trunk/res1/branch1/conv1_1x1/bn" 202 | type: "BN" 203 | bottom: "AttentionA_1/trunk/res1/branch1/conv1_1x1" 204 | top: "AttentionA_1/trunk/res1/branch1/conv1_1x1/bn" 205 | bn_param { 206 | frozen: true 207 | } 208 | } 209 | 210 | layer{ 211 | name: "AttentionA_1/trunk/res1/branch1/conv1_1x1/bn/relu" 212 | type: "ReLU" 213 | bottom: "AttentionA_1/trunk/res1/branch1/conv1_1x1/bn" 214 | top: "AttentionA_1/trunk/res1/branch1/conv1_1x1/bn" 215 | } 216 | 217 | layer{ 218 | name: "AttentionA_1/trunk/res1/branch1/conv2_3x3" 219 | type: "Convolution" 220 | bottom: "AttentionA_1/trunk/res1/branch1/conv1_1x1/bn" 221 | top: "AttentionA_1/trunk/res1/branch1/conv2_3x3" 222 | convolution_param { 223 | num_output: 64 224 | pad: 1 225 | kernel_size: 3 226 | stride: 1 227 | bias_term: false 228 | } 229 | } 230 | 231 | layer{ 232 | name: "AttentionA_1/trunk/res1/branch1/conv2_3x3/bn" 233 | type: "BN" 234 | bottom: "AttentionA_1/trunk/res1/branch1/conv2_3x3" 235 | top: "AttentionA_1/trunk/res1/branch1/conv2_3x3/bn" 236 | bn_param { 237 | frozen: true 238 | } 239 | } 240 | 241 | layer{ 242 | name: "AttentionA_1/trunk/res1/branch1/conv2_3x3/bn/relu" 243 | type: "ReLU" 244 | bottom: "AttentionA_1/trunk/res1/branch1/conv2_3x3/bn" 245 | top: "AttentionA_1/trunk/res1/branch1/conv2_3x3/bn" 246 | } 247 | 248 | layer{ 249 | name: "AttentionA_1/trunk/res1/branch1/conv3_1x1" 250 | type: "Convolution" 251 | bottom: "AttentionA_1/trunk/res1/branch1/conv2_3x3/bn" 252 | top: "AttentionA_1/trunk/res1/branch1/conv3_1x1" 253 | convolution_param { 254 | num_output: 256 255 | pad: 0 256 | kernel_size: 1 257 | stride: 1 258 | bias_term: false 259 | } 260 | } 261 | 262 | layer{ 263 | name: "AttentionA_1/trunk/res1" 264 | type: "Eltwise" 265 | bottom: "AttentionA_1/trunk/res1/branch1/conv3_1x1" 266 | bottom: "pre_res_1" 267 | top: "AttentionA_1/trunk/res1" 268 | eltwise_param { 269 | operation: SUM 270 | } 271 | } 272 | 273 | layer{ 274 | name: "AttentionA_1/trunk/res1/bn" 275 | type: "BN" 276 | bottom: "AttentionA_1/trunk/res1" 277 | top: "AttentionA_1/trunk/res1/bn" 278 | bn_param { 279 | frozen: true 280 | } 281 | } 282 | 283 | layer{ 284 | name: "AttentionA_1/trunk/res1/bn/relu" 285 | type: "ReLU" 286 | bottom: "AttentionA_1/trunk/res1/bn" 287 | top: "AttentionA_1/trunk/res1/bn" 288 | } 289 | 290 | layer{ 291 | name: "AttentionA_1/trunk/res2/branch1/conv1_1x1" 292 | type: "Convolution" 293 | bottom: "AttentionA_1/trunk/res1/bn" 294 | top: "AttentionA_1/trunk/res2/branch1/conv1_1x1" 295 | convolution_param { 296 | num_output: 64 297 | pad: 0 298 | kernel_size: 1 299 | stride: 1 300 | bias_term: false 301 | } 302 | } 303 | 304 | layer{ 305 | name: "AttentionA_1/trunk/res2/branch1/conv1_1x1/bn" 306 | type: "BN" 307 | bottom: "AttentionA_1/trunk/res2/branch1/conv1_1x1" 308 | top: "AttentionA_1/trunk/res2/branch1/conv1_1x1/bn" 309 | bn_param { 310 | frozen: true 311 | } 312 | } 313 | 314 | 
layer{ 315 | name: "AttentionA_1/trunk/res2/branch1/conv1_1x1/bn/relu" 316 | type: "ReLU" 317 | bottom: "AttentionA_1/trunk/res2/branch1/conv1_1x1/bn" 318 | top: "AttentionA_1/trunk/res2/branch1/conv1_1x1/bn" 319 | } 320 | 321 | layer{ 322 | name: "AttentionA_1/trunk/res2/branch1/conv2_3x3" 323 | type: "Convolution" 324 | bottom: "AttentionA_1/trunk/res2/branch1/conv1_1x1/bn" 325 | top: "AttentionA_1/trunk/res2/branch1/conv2_3x3" 326 | convolution_param { 327 | num_output: 64 328 | pad: 1 329 | kernel_size: 3 330 | stride: 1 331 | bias_term: false 332 | } 333 | } 334 | 335 | layer{ 336 | name: "AttentionA_1/trunk/res2/branch1/conv2_3x3/bn" 337 | type: "BN" 338 | bottom: "AttentionA_1/trunk/res2/branch1/conv2_3x3" 339 | top: "AttentionA_1/trunk/res2/branch1/conv2_3x3/bn" 340 | bn_param { 341 | frozen: true 342 | } 343 | } 344 | 345 | layer{ 346 | name: "AttentionA_1/trunk/res2/branch1/conv2_3x3/bn/relu" 347 | type: "ReLU" 348 | bottom: "AttentionA_1/trunk/res2/branch1/conv2_3x3/bn" 349 | top: "AttentionA_1/trunk/res2/branch1/conv2_3x3/bn" 350 | } 351 | 352 | layer{ 353 | name: "AttentionA_1/trunk/res2/branch1/conv3_1x1" 354 | type: "Convolution" 355 | bottom: "AttentionA_1/trunk/res2/branch1/conv2_3x3/bn" 356 | top: "AttentionA_1/trunk/res2/branch1/conv3_1x1" 357 | convolution_param { 358 | num_output: 256 359 | pad: 0 360 | kernel_size: 1 361 | stride: 1 362 | bias_term: false 363 | } 364 | } 365 | 366 | layer{ 367 | name: "AttentionA_1/trunk/res2" 368 | type: "Eltwise" 369 | bottom: "AttentionA_1/trunk/res2/branch1/conv3_1x1" 370 | bottom: "AttentionA_1/trunk/res1" 371 | top: "AttentionA_1/trunk/res2" 372 | eltwise_param { 373 | operation: SUM 374 | } 375 | } 376 | 377 | layer{ 378 | name: "AttentionA_1/trunk/res2/bn" 379 | type: "BN" 380 | bottom: "AttentionA_1/trunk/res2" 381 | top: "AttentionA_1/trunk/res2/bn" 382 | bn_param { 383 | frozen: true 384 | } 385 | } 386 | 387 | layer{ 388 | name: "AttentionA_1/trunk/res2/bn/relu" 389 | type: "ReLU" 390 | bottom: "AttentionA_1/trunk/res2/bn" 391 | top: "AttentionA_1/trunk/res2/bn" 392 | } 393 | 394 | layer{ 395 | name: "AttentionA_1/trunk/res3/branch1/conv1_1x1" 396 | type: "Convolution" 397 | bottom: "AttentionA_1/trunk/res2/bn" 398 | top: "AttentionA_1/trunk/res3/branch1/conv1_1x1" 399 | convolution_param { 400 | num_output: 64 401 | pad: 0 402 | kernel_size: 1 403 | stride: 1 404 | bias_term: false 405 | } 406 | } 407 | 408 | layer{ 409 | name: "AttentionA_1/trunk/res3/branch1/conv1_1x1/bn" 410 | type: "BN" 411 | bottom: "AttentionA_1/trunk/res3/branch1/conv1_1x1" 412 | top: "AttentionA_1/trunk/res3/branch1/conv1_1x1/bn" 413 | bn_param { 414 | frozen: true 415 | } 416 | } 417 | 418 | layer{ 419 | name: "AttentionA_1/trunk/res3/branch1/conv1_1x1/bn/relu" 420 | type: "ReLU" 421 | bottom: "AttentionA_1/trunk/res3/branch1/conv1_1x1/bn" 422 | top: "AttentionA_1/trunk/res3/branch1/conv1_1x1/bn" 423 | } 424 | 425 | layer{ 426 | name: "AttentionA_1/trunk/res3/branch1/conv2_3x3" 427 | type: "Convolution" 428 | bottom: "AttentionA_1/trunk/res3/branch1/conv1_1x1/bn" 429 | top: "AttentionA_1/trunk/res3/branch1/conv2_3x3" 430 | convolution_param { 431 | num_output: 64 432 | pad: 1 433 | kernel_size: 3 434 | stride: 1 435 | bias_term: false 436 | } 437 | } 438 | 439 | layer{ 440 | name: "AttentionA_1/trunk/res3/branch1/conv2_3x3/bn" 441 | type: "BN" 442 | bottom: "AttentionA_1/trunk/res3/branch1/conv2_3x3" 443 | top: "AttentionA_1/trunk/res3/branch1/conv2_3x3/bn" 444 | bn_param { 445 | frozen: true 446 | } 447 | } 448 | 449 | layer{ 450 | name: 
"AttentionA_1/trunk/res3/branch1/conv2_3x3/bn/relu" 451 | type: "ReLU" 452 | bottom: "AttentionA_1/trunk/res3/branch1/conv2_3x3/bn" 453 | top: "AttentionA_1/trunk/res3/branch1/conv2_3x3/bn" 454 | } 455 | 456 | layer{ 457 | name: "AttentionA_1/trunk/res3/branch1/conv3_1x1" 458 | type: "Convolution" 459 | bottom: "AttentionA_1/trunk/res3/branch1/conv2_3x3/bn" 460 | top: "AttentionA_1/trunk/res3/branch1/conv3_1x1" 461 | convolution_param { 462 | num_output: 256 463 | pad: 0 464 | kernel_size: 1 465 | stride: 1 466 | bias_term: false 467 | } 468 | } 469 | 470 | layer{ 471 | name: "AttentionA_1/trunk/res3" 472 | type: "Eltwise" 473 | bottom: "AttentionA_1/trunk/res3/branch1/conv3_1x1" 474 | bottom: "AttentionA_1/trunk/res2" 475 | top: "AttentionA_1/trunk/res3" 476 | eltwise_param { 477 | operation: SUM 478 | } 479 | } 480 | 481 | layer{ 482 | name: "AttentionA_1/mask/down_sample/pool1_3x3_s2" 483 | type: "Pooling" 484 | bottom: "AttentionA_1/trunk/res1" 485 | top: "AttentionA_1/mask/down_sample/pool1_3x3_s2" 486 | pooling_param { 487 | pool: MAX 488 | kernel_size: 3 489 | stride: 2 490 | } 491 | } 492 | 493 | layer{ 494 | name: "AttentionA_1/mask/down_sample/pool1_3x3_s2/bn" 495 | type: "BN" 496 | bottom: "AttentionA_1/mask/down_sample/pool1_3x3_s2" 497 | top: "AttentionA_1/mask/down_sample/pool1_3x3_s2/bn" 498 | bn_param { 499 | frozen: true 500 | } 501 | } 502 | 503 | layer{ 504 | name: "AttentionA_1/mask/down_sample/pool1_3x3_s2/bn/relu" 505 | type: "ReLU" 506 | bottom: "AttentionA_1/mask/down_sample/pool1_3x3_s2/bn" 507 | top: "AttentionA_1/mask/down_sample/pool1_3x3_s2/bn" 508 | } 509 | 510 | layer{ 511 | name: "AttentionA_1/mask/down_sample/res1_1/branch1/conv1_1x1" 512 | type: "Convolution" 513 | bottom: "AttentionA_1/mask/down_sample/pool1_3x3_s2/bn" 514 | top: "AttentionA_1/mask/down_sample/res1_1/branch1/conv1_1x1" 515 | convolution_param { 516 | num_output: 64 517 | pad: 0 518 | kernel_size: 1 519 | stride: 1 520 | bias_term: false 521 | } 522 | } 523 | 524 | layer{ 525 | name: "AttentionA_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 526 | type: "BN" 527 | bottom: "AttentionA_1/mask/down_sample/res1_1/branch1/conv1_1x1" 528 | top: "AttentionA_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 529 | bn_param { 530 | frozen: true 531 | } 532 | } 533 | 534 | layer{ 535 | name: "AttentionA_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn/relu" 536 | type: "ReLU" 537 | bottom: "AttentionA_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 538 | top: "AttentionA_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 539 | } 540 | 541 | layer{ 542 | name: "AttentionA_1/mask/down_sample/res1_1/branch1/conv2_3x3" 543 | type: "Convolution" 544 | bottom: "AttentionA_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 545 | top: "AttentionA_1/mask/down_sample/res1_1/branch1/conv2_3x3" 546 | convolution_param { 547 | num_output: 64 548 | pad: 1 549 | kernel_size: 3 550 | stride: 1 551 | bias_term: false 552 | } 553 | } 554 | 555 | layer{ 556 | name: "AttentionA_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 557 | type: "BN" 558 | bottom: "AttentionA_1/mask/down_sample/res1_1/branch1/conv2_3x3" 559 | top: "AttentionA_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 560 | bn_param { 561 | frozen: true 562 | } 563 | } 564 | 565 | layer{ 566 | name: "AttentionA_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn/relu" 567 | type: "ReLU" 568 | bottom: "AttentionA_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 569 | top: "AttentionA_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 570 | } 571 | 572 | layer{ 573 | name: 
"AttentionA_1/mask/down_sample/res1_1/branch1/conv3_1x1" 574 | type: "Convolution" 575 | bottom: "AttentionA_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 576 | top: "AttentionA_1/mask/down_sample/res1_1/branch1/conv3_1x1" 577 | convolution_param { 578 | num_output: 256 579 | pad: 0 580 | kernel_size: 1 581 | stride: 1 582 | bias_term: false 583 | } 584 | } 585 | 586 | layer{ 587 | name: "AttentionA_1/mask/down_sample/res1_1" 588 | type: "Eltwise" 589 | bottom: "AttentionA_1/mask/down_sample/res1_1/branch1/conv3_1x1" 590 | bottom: "AttentionA_1/mask/down_sample/pool1_3x3_s2" 591 | top: "AttentionA_1/mask/down_sample/res1_1" 592 | eltwise_param { 593 | operation: SUM 594 | } 595 | } 596 | 597 | layer{ 598 | name: "AttentionA_1/mask/down_sample/pool2_3x3_s2" 599 | type: "Pooling" 600 | bottom: "AttentionA_1/mask/down_sample/res1_1" 601 | top: "AttentionA_1/mask/down_sample/pool2_3x3_s2" 602 | pooling_param { 603 | pool: MAX 604 | kernel_size: 3 605 | stride: 2 606 | } 607 | } 608 | 609 | layer{ 610 | name: "AttentionA_1/mask/down_sample/pool2_3x3_s2/bn" 611 | type: "BN" 612 | bottom: "AttentionA_1/mask/down_sample/pool2_3x3_s2" 613 | top: "AttentionA_1/mask/down_sample/pool2_3x3_s2/bn" 614 | bn_param { 615 | frozen: true 616 | } 617 | } 618 | 619 | layer{ 620 | name: "AttentionA_1/mask/down_sample/pool2_3x3_s2/bn/relu" 621 | type: "ReLU" 622 | bottom: "AttentionA_1/mask/down_sample/pool2_3x3_s2/bn" 623 | top: "AttentionA_1/mask/down_sample/pool2_3x3_s2/bn" 624 | } 625 | 626 | layer{ 627 | name: "AttentionA_1/mask/down_sample/res2_1/branch1/conv1_1x1" 628 | type: "Convolution" 629 | bottom: "AttentionA_1/mask/down_sample/pool2_3x3_s2/bn" 630 | top: "AttentionA_1/mask/down_sample/res2_1/branch1/conv1_1x1" 631 | convolution_param { 632 | num_output: 64 633 | pad: 0 634 | kernel_size: 1 635 | stride: 1 636 | bias_term: false 637 | } 638 | } 639 | 640 | layer{ 641 | name: "AttentionA_1/mask/down_sample/res2_1/branch1/conv1_1x1/bn" 642 | type: "BN" 643 | bottom: "AttentionA_1/mask/down_sample/res2_1/branch1/conv1_1x1" 644 | top: "AttentionA_1/mask/down_sample/res2_1/branch1/conv1_1x1/bn" 645 | bn_param { 646 | frozen: true 647 | } 648 | } 649 | 650 | layer{ 651 | name: "AttentionA_1/mask/down_sample/res2_1/branch1/conv1_1x1/bn/relu" 652 | type: "ReLU" 653 | bottom: "AttentionA_1/mask/down_sample/res2_1/branch1/conv1_1x1/bn" 654 | top: "AttentionA_1/mask/down_sample/res2_1/branch1/conv1_1x1/bn" 655 | } 656 | 657 | layer{ 658 | name: "AttentionA_1/mask/down_sample/res2_1/branch1/conv2_3x3" 659 | type: "Convolution" 660 | bottom: "AttentionA_1/mask/down_sample/res2_1/branch1/conv1_1x1/bn" 661 | top: "AttentionA_1/mask/down_sample/res2_1/branch1/conv2_3x3" 662 | convolution_param { 663 | num_output: 64 664 | pad: 1 665 | kernel_size: 3 666 | stride: 1 667 | bias_term: false 668 | } 669 | } 670 | 671 | layer{ 672 | name: "AttentionA_1/mask/down_sample/res2_1/branch1/conv2_3x3/bn" 673 | type: "BN" 674 | bottom: "AttentionA_1/mask/down_sample/res2_1/branch1/conv2_3x3" 675 | top: "AttentionA_1/mask/down_sample/res2_1/branch1/conv2_3x3/bn" 676 | bn_param { 677 | frozen: true 678 | } 679 | } 680 | 681 | layer{ 682 | name: "AttentionA_1/mask/down_sample/res2_1/branch1/conv2_3x3/bn/relu" 683 | type: "ReLU" 684 | bottom: "AttentionA_1/mask/down_sample/res2_1/branch1/conv2_3x3/bn" 685 | top: "AttentionA_1/mask/down_sample/res2_1/branch1/conv2_3x3/bn" 686 | } 687 | 688 | layer{ 689 | name: "AttentionA_1/mask/down_sample/res2_1/branch1/conv3_1x1" 690 | type: "Convolution" 691 | bottom: 
"AttentionA_1/mask/down_sample/res2_1/branch1/conv2_3x3/bn" 692 | top: "AttentionA_1/mask/down_sample/res2_1/branch1/conv3_1x1" 693 | convolution_param { 694 | num_output: 256 695 | pad: 0 696 | kernel_size: 1 697 | stride: 1 698 | bias_term: false 699 | } 700 | } 701 | 702 | layer{ 703 | name: "AttentionA_1/mask/down_sample/res2_1" 704 | type: "Eltwise" 705 | bottom: "AttentionA_1/mask/down_sample/res2_1/branch1/conv3_1x1" 706 | bottom: "AttentionA_1/mask/down_sample/pool2_3x3_s2" 707 | top: "AttentionA_1/mask/down_sample/res2_1" 708 | eltwise_param { 709 | operation: SUM 710 | } 711 | } 712 | 713 | layer{ 714 | name: "AttentionA_1/mask/down_sample/pool3_3x3_s2" 715 | type: "Pooling" 716 | bottom: "AttentionA_1/mask/down_sample/res2_1" 717 | top: "AttentionA_1/mask/down_sample/pool3_3x3_s2" 718 | pooling_param { 719 | pool: MAX 720 | kernel_size: 3 721 | stride: 2 722 | } 723 | } 724 | 725 | layer{ 726 | name: "AttentionA_1/mask/down_sample/pool3_3x3_s2/bn" 727 | type: "BN" 728 | bottom: "AttentionA_1/mask/down_sample/pool3_3x3_s2" 729 | top: "AttentionA_1/mask/down_sample/pool3_3x3_s2/bn" 730 | bn_param { 731 | frozen: true 732 | } 733 | } 734 | 735 | layer{ 736 | name: "AttentionA_1/mask/down_sample/pool3_3x3_s2/bn/relu" 737 | type: "ReLU" 738 | bottom: "AttentionA_1/mask/down_sample/pool3_3x3_s2/bn" 739 | top: "AttentionA_1/mask/down_sample/pool3_3x3_s2/bn" 740 | } 741 | 742 | layer{ 743 | name: "AttentionA_1/mask/down_sample/res3_1/branch1/conv1_1x1" 744 | type: "Convolution" 745 | bottom: "AttentionA_1/mask/down_sample/pool3_3x3_s2/bn" 746 | top: "AttentionA_1/mask/down_sample/res3_1/branch1/conv1_1x1" 747 | convolution_param { 748 | num_output: 64 749 | pad: 0 750 | kernel_size: 1 751 | stride: 1 752 | bias_term: false 753 | } 754 | } 755 | 756 | layer{ 757 | name: "AttentionA_1/mask/down_sample/res3_1/branch1/conv1_1x1/bn" 758 | type: "BN" 759 | bottom: "AttentionA_1/mask/down_sample/res3_1/branch1/conv1_1x1" 760 | top: "AttentionA_1/mask/down_sample/res3_1/branch1/conv1_1x1/bn" 761 | bn_param { 762 | frozen: true 763 | } 764 | } 765 | 766 | layer{ 767 | name: "AttentionA_1/mask/down_sample/res3_1/branch1/conv1_1x1/bn/relu" 768 | type: "ReLU" 769 | bottom: "AttentionA_1/mask/down_sample/res3_1/branch1/conv1_1x1/bn" 770 | top: "AttentionA_1/mask/down_sample/res3_1/branch1/conv1_1x1/bn" 771 | } 772 | 773 | layer{ 774 | name: "AttentionA_1/mask/down_sample/res3_1/branch1/conv2_3x3" 775 | type: "Convolution" 776 | bottom: "AttentionA_1/mask/down_sample/res3_1/branch1/conv1_1x1/bn" 777 | top: "AttentionA_1/mask/down_sample/res3_1/branch1/conv2_3x3" 778 | convolution_param { 779 | num_output: 64 780 | pad: 1 781 | kernel_size: 3 782 | stride: 1 783 | bias_term: false 784 | } 785 | } 786 | 787 | layer{ 788 | name: "AttentionA_1/mask/down_sample/res3_1/branch1/conv2_3x3/bn" 789 | type: "BN" 790 | bottom: "AttentionA_1/mask/down_sample/res3_1/branch1/conv2_3x3" 791 | top: "AttentionA_1/mask/down_sample/res3_1/branch1/conv2_3x3/bn" 792 | bn_param { 793 | frozen: true 794 | } 795 | } 796 | 797 | layer{ 798 | name: "AttentionA_1/mask/down_sample/res3_1/branch1/conv2_3x3/bn/relu" 799 | type: "ReLU" 800 | bottom: "AttentionA_1/mask/down_sample/res3_1/branch1/conv2_3x3/bn" 801 | top: "AttentionA_1/mask/down_sample/res3_1/branch1/conv2_3x3/bn" 802 | } 803 | 804 | layer{ 805 | name: "AttentionA_1/mask/down_sample/res3_1/branch1/conv3_1x1" 806 | type: "Convolution" 807 | bottom: "AttentionA_1/mask/down_sample/res3_1/branch1/conv2_3x3/bn" 808 | top: 
"AttentionA_1/mask/down_sample/res3_1/branch1/conv3_1x1" 809 | convolution_param { 810 | num_output: 256 811 | pad: 0 812 | kernel_size: 1 813 | stride: 1 814 | bias_term: false 815 | } 816 | } 817 | 818 | layer{ 819 | name: "AttentionA_1/mask/down_sample/res3_1" 820 | type: "Eltwise" 821 | bottom: "AttentionA_1/mask/down_sample/res3_1/branch1/conv3_1x1" 822 | bottom: "AttentionA_1/mask/down_sample/pool3_3x3_s2" 823 | top: "AttentionA_1/mask/down_sample/res3_1" 824 | eltwise_param { 825 | operation: SUM 826 | } 827 | } 828 | 829 | layer{ 830 | name: "AttentionA_1/mask/down_sample/res3_1/bn" 831 | type: "BN" 832 | bottom: "AttentionA_1/mask/down_sample/res3_1" 833 | top: "AttentionA_1/mask/down_sample/res3_1/bn" 834 | bn_param { 835 | frozen: true 836 | } 837 | } 838 | 839 | layer{ 840 | name: "AttentionA_1/mask/down_sample/res3_1/bn/relu" 841 | type: "ReLU" 842 | bottom: "AttentionA_1/mask/down_sample/res3_1/bn" 843 | top: "AttentionA_1/mask/down_sample/res3_1/bn" 844 | } 845 | 846 | layer{ 847 | name: "AttentionA_1/mask/down_sample/res3_2/branch1/conv1_1x1" 848 | type: "Convolution" 849 | bottom: "AttentionA_1/mask/down_sample/res3_1/bn" 850 | top: "AttentionA_1/mask/down_sample/res3_2/branch1/conv1_1x1" 851 | convolution_param { 852 | num_output: 64 853 | pad: 0 854 | kernel_size: 1 855 | stride: 1 856 | bias_term: false 857 | } 858 | } 859 | 860 | layer{ 861 | name: "AttentionA_1/mask/down_sample/res3_2/branch1/conv1_1x1/bn" 862 | type: "BN" 863 | bottom: "AttentionA_1/mask/down_sample/res3_2/branch1/conv1_1x1" 864 | top: "AttentionA_1/mask/down_sample/res3_2/branch1/conv1_1x1/bn" 865 | bn_param { 866 | frozen: true 867 | } 868 | } 869 | 870 | layer{ 871 | name: "AttentionA_1/mask/down_sample/res3_2/branch1/conv1_1x1/bn/relu" 872 | type: "ReLU" 873 | bottom: "AttentionA_1/mask/down_sample/res3_2/branch1/conv1_1x1/bn" 874 | top: "AttentionA_1/mask/down_sample/res3_2/branch1/conv1_1x1/bn" 875 | } 876 | 877 | layer{ 878 | name: "AttentionA_1/mask/down_sample/res3_2/branch1/conv2_3x3" 879 | type: "Convolution" 880 | bottom: "AttentionA_1/mask/down_sample/res3_2/branch1/conv1_1x1/bn" 881 | top: "AttentionA_1/mask/down_sample/res3_2/branch1/conv2_3x3" 882 | convolution_param { 883 | num_output: 64 884 | pad: 1 885 | kernel_size: 3 886 | stride: 1 887 | bias_term: false 888 | } 889 | } 890 | 891 | layer{ 892 | name: "AttentionA_1/mask/down_sample/res3_2/branch1/conv2_3x3/bn" 893 | type: "BN" 894 | bottom: "AttentionA_1/mask/down_sample/res3_2/branch1/conv2_3x3" 895 | top: "AttentionA_1/mask/down_sample/res3_2/branch1/conv2_3x3/bn" 896 | bn_param { 897 | frozen: true 898 | } 899 | } 900 | 901 | layer{ 902 | name: "AttentionA_1/mask/down_sample/res3_2/branch1/conv2_3x3/bn/relu" 903 | type: "ReLU" 904 | bottom: "AttentionA_1/mask/down_sample/res3_2/branch1/conv2_3x3/bn" 905 | top: "AttentionA_1/mask/down_sample/res3_2/branch1/conv2_3x3/bn" 906 | } 907 | 908 | layer{ 909 | name: "AttentionA_1/mask/down_sample/res3_2/branch1/conv3_1x1" 910 | type: "Convolution" 911 | bottom: "AttentionA_1/mask/down_sample/res3_2/branch1/conv2_3x3/bn" 912 | top: "AttentionA_1/mask/down_sample/res3_2/branch1/conv3_1x1" 913 | convolution_param { 914 | num_output: 256 915 | pad: 0 916 | kernel_size: 1 917 | stride: 1 918 | bias_term: false 919 | } 920 | } 921 | 922 | layer{ 923 | name: "AttentionA_1/mask/down_sample/res3_2" 924 | type: "Eltwise" 925 | bottom: "AttentionA_1/mask/down_sample/res3_2/branch1/conv3_1x1" 926 | bottom: "AttentionA_1/mask/down_sample/res3_1" 927 | top: "AttentionA_1/mask/down_sample/res3_2" 
928 | eltwise_param { 929 | operation: SUM 930 | } 931 | } 932 | 933 | layer{ 934 | name: "AttentionA_1/mask/up_sample/interp_3" 935 | type: "Interp" 936 | bottom: "AttentionA_1/mask/down_sample/res3_2" 937 | bottom: "AttentionA_1/mask/down_sample/res2_1" 938 | top: "AttentionA_1/mask/up_sample/interp_3" 939 | } 940 | 941 | layer{ 942 | name: "AttentionA_1/mask/down_sample/res2_1/bn" 943 | type: "BN" 944 | bottom: "AttentionA_1/mask/down_sample/res2_1" 945 | top: "AttentionA_1/mask/down_sample/res2_1/bn" 946 | bn_param { 947 | frozen: true 948 | } 949 | } 950 | 951 | layer{ 952 | name: "AttentionA_1/mask/down_sample/res2_1/bn/relu" 953 | type: "ReLU" 954 | bottom: "AttentionA_1/mask/down_sample/res2_1/bn" 955 | top: "AttentionA_1/mask/down_sample/res2_1/bn" 956 | } 957 | 958 | layer{ 959 | name: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1" 960 | type: "Convolution" 961 | bottom: "AttentionA_1/mask/down_sample/res2_1/bn" 962 | top: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1" 963 | convolution_param { 964 | num_output: 64 965 | pad: 0 966 | kernel_size: 1 967 | stride: 1 968 | bias_term: false 969 | } 970 | } 971 | 972 | layer{ 973 | name: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1/bn" 974 | type: "BN" 975 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1" 976 | top: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1/bn" 977 | bn_param { 978 | frozen: true 979 | } 980 | } 981 | 982 | layer{ 983 | name: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1/bn/relu" 984 | type: "ReLU" 985 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1/bn" 986 | top: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1/bn" 987 | } 988 | 989 | layer{ 990 | name: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3" 991 | type: "Convolution" 992 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1/bn" 993 | top: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3" 994 | convolution_param { 995 | num_output: 64 996 | pad: 1 997 | kernel_size: 3 998 | stride: 1 999 | bias_term: false 1000 | } 1001 | } 1002 | 1003 | layer{ 1004 | name: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3/bn" 1005 | type: "BN" 1006 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3" 1007 | top: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3/bn" 1008 | bn_param { 1009 | frozen: true 1010 | } 1011 | } 1012 | 1013 | layer{ 1014 | name: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3/bn/relu" 1015 | type: "ReLU" 1016 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3/bn" 1017 | top: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3/bn" 1018 | } 1019 | 1020 | layer{ 1021 | name: "AttentionA_1/mask/skip/res2/branch1/conv3_1x1" 1022 | type: "Convolution" 1023 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3/bn" 1024 | top: "AttentionA_1/mask/skip/res2/branch1/conv3_1x1" 1025 | convolution_param { 1026 | num_output: 256 1027 | pad: 0 1028 | kernel_size: 1 1029 | stride: 1 1030 | bias_term: false 1031 | } 1032 | } 1033 | 1034 | layer{ 1035 | name: "AttentionA_1/mask/skip/res2" 1036 | type: "Eltwise" 1037 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv3_1x1" 1038 | bottom: "AttentionA_1/mask/down_sample/res2_1" 1039 | top: "AttentionA_1/mask/skip/res2" 1040 | eltwise_param { 1041 | operation: SUM 1042 | } 1043 | } 1044 | 1045 | layer{ 1046 | name: "AttentionA_1/mask/up_sample2" 1047 | type: "Eltwise" 1048 | bottom: "AttentionA_1/mask/skip/res2" 1049 | bottom: "AttentionA_1/mask/up_sample/interp_3" 1050 | top: "AttentionA_1/mask/up_sample2" 1051 | eltwise_param { 1052 | operation: SUM 1053 | } 1054 | } 1055 | 1056 | layer{ 
1057 | name: "AttentionA_1/mask/up_sample2/bn" 1058 | type: "BN" 1059 | bottom: "AttentionA_1/mask/up_sample2" 1060 | top: "AttentionA_1/mask/up_sample2/bn" 1061 | bn_param { 1062 | frozen: true 1063 | } 1064 | } 1065 | 1066 | layer{ 1067 | name: "AttentionA_1/mask/up_sample2/bn/relu" 1068 | type: "ReLU" 1069 | bottom: "AttentionA_1/mask/up_sample2/bn" 1070 | top: "AttentionA_1/mask/up_sample2/bn" 1071 | } 1072 | 1073 | layer{ 1074 | name: "AttentionA_1/mask/up_sample/res2_1/branch1/conv1_1x1" 1075 | type: "Convolution" 1076 | bottom: "AttentionA_1/mask/up_sample2/bn" 1077 | top: "AttentionA_1/mask/up_sample/res2_1/branch1/conv1_1x1" 1078 | convolution_param { 1079 | num_output: 64 1080 | pad: 0 1081 | kernel_size: 1 1082 | stride: 1 1083 | bias_term: false 1084 | } 1085 | } 1086 | 1087 | layer{ 1088 | name: "AttentionA_1/mask/up_sample/res2_1/branch1/conv1_1x1/bn" 1089 | type: "BN" 1090 | bottom: "AttentionA_1/mask/up_sample/res2_1/branch1/conv1_1x1" 1091 | top: "AttentionA_1/mask/up_sample/res2_1/branch1/conv1_1x1/bn" 1092 | bn_param { 1093 | frozen: true 1094 | } 1095 | } 1096 | 1097 | layer{ 1098 | name: "AttentionA_1/mask/up_sample/res2_1/branch1/conv1_1x1/bn/relu" 1099 | type: "ReLU" 1100 | bottom: "AttentionA_1/mask/up_sample/res2_1/branch1/conv1_1x1/bn" 1101 | top: "AttentionA_1/mask/up_sample/res2_1/branch1/conv1_1x1/bn" 1102 | } 1103 | 1104 | layer{ 1105 | name: "AttentionA_1/mask/up_sample/res2_1/branch1/conv2_3x3" 1106 | type: "Convolution" 1107 | bottom: "AttentionA_1/mask/up_sample/res2_1/branch1/conv1_1x1/bn" 1108 | top: "AttentionA_1/mask/up_sample/res2_1/branch1/conv2_3x3" 1109 | convolution_param { 1110 | num_output: 64 1111 | pad: 1 1112 | kernel_size: 3 1113 | stride: 1 1114 | bias_term: false 1115 | } 1116 | } 1117 | 1118 | layer{ 1119 | name: "AttentionA_1/mask/up_sample/res2_1/branch1/conv2_3x3/bn" 1120 | type: "BN" 1121 | bottom: "AttentionA_1/mask/up_sample/res2_1/branch1/conv2_3x3" 1122 | top: "AttentionA_1/mask/up_sample/res2_1/branch1/conv2_3x3/bn" 1123 | bn_param { 1124 | frozen: true 1125 | } 1126 | } 1127 | 1128 | layer{ 1129 | name: "AttentionA_1/mask/up_sample/res2_1/branch1/conv2_3x3/bn/relu" 1130 | type: "ReLU" 1131 | bottom: "AttentionA_1/mask/up_sample/res2_1/branch1/conv2_3x3/bn" 1132 | top: "AttentionA_1/mask/up_sample/res2_1/branch1/conv2_3x3/bn" 1133 | } 1134 | 1135 | layer{ 1136 | name: "AttentionA_1/mask/up_sample/res2_1/branch1/conv3_1x1" 1137 | type: "Convolution" 1138 | bottom: "AttentionA_1/mask/up_sample/res2_1/branch1/conv2_3x3/bn" 1139 | top: "AttentionA_1/mask/up_sample/res2_1/branch1/conv3_1x1" 1140 | convolution_param { 1141 | num_output: 256 1142 | pad: 0 1143 | kernel_size: 1 1144 | stride: 1 1145 | bias_term: false 1146 | } 1147 | } 1148 | 1149 | layer{ 1150 | name: "AttentionA_1/mask/up_sample/res2_1" 1151 | type: "Eltwise" 1152 | bottom: "AttentionA_1/mask/up_sample/res2_1/branch1/conv3_1x1" 1153 | bottom: "AttentionA_1/mask/up_sample2" 1154 | top: "AttentionA_1/mask/up_sample/res2_1" 1155 | eltwise_param { 1156 | operation: SUM 1157 | } 1158 | } 1159 | 1160 | layer{ 1161 | name: "AttentionA_1/mask/up_sample/interp_2" 1162 | type: "Interp" 1163 | bottom: "AttentionA_1/mask/up_sample/res2_1" 1164 | bottom: "AttentionA_1/mask/down_sample/res1_1" 1165 | top: "AttentionA_1/mask/up_sample/interp_2" 1166 | } 1167 | 1168 | layer{ 1169 | name: "AttentionA_1/mask/down_sample/res1_1/bn" 1170 | type: "BN" 1171 | bottom: "AttentionA_1/mask/down_sample/res1_1" 1172 | top: "AttentionA_1/mask/down_sample/res1_1/bn" 1173 | bn_param { 1174 | frozen: 
true 1175 | } 1176 | } 1177 | 1178 | layer{ 1179 | name: "AttentionA_1/mask/down_sample/res1_1/bn/relu" 1180 | type: "ReLU" 1181 | bottom: "AttentionA_1/mask/down_sample/res1_1/bn" 1182 | top: "AttentionA_1/mask/down_sample/res1_1/bn" 1183 | } 1184 | 1185 | layer{ 1186 | name: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1" 1187 | type: "Convolution" 1188 | bottom: "AttentionA_1/mask/down_sample/res1_1/bn" 1189 | top: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1" 1190 | convolution_param { 1191 | num_output: 64 1192 | pad: 0 1193 | kernel_size: 1 1194 | stride: 1 1195 | bias_term: false 1196 | } 1197 | } 1198 | 1199 | layer{ 1200 | name: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1/bn" 1201 | type: "BN" 1202 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1" 1203 | top: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1/bn" 1204 | bn_param { 1205 | frozen: true 1206 | } 1207 | } 1208 | 1209 | layer{ 1210 | name: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1/bn/relu" 1211 | type: "ReLU" 1212 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1/bn" 1213 | top: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1/bn" 1214 | } 1215 | 1216 | layer{ 1217 | name: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3" 1218 | type: "Convolution" 1219 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1/bn" 1220 | top: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3" 1221 | convolution_param { 1222 | num_output: 64 1223 | pad: 1 1224 | kernel_size: 3 1225 | stride: 1 1226 | bias_term: false 1227 | } 1228 | } 1229 | 1230 | layer{ 1231 | name: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3/bn" 1232 | type: "BN" 1233 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3" 1234 | top: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3/bn" 1235 | bn_param { 1236 | frozen: true 1237 | } 1238 | } 1239 | 1240 | layer{ 1241 | name: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3/bn/relu" 1242 | type: "ReLU" 1243 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3/bn" 1244 | top: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3/bn" 1245 | } 1246 | 1247 | layer{ 1248 | name: "AttentionA_1/mask/skip/res1/branch1/conv3_1x1" 1249 | type: "Convolution" 1250 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3/bn" 1251 | top: "AttentionA_1/mask/skip/res1/branch1/conv3_1x1" 1252 | convolution_param { 1253 | num_output: 256 1254 | pad: 0 1255 | kernel_size: 1 1256 | stride: 1 1257 | bias_term: false 1258 | } 1259 | } 1260 | 1261 | layer{ 1262 | name: "AttentionA_1/mask/skip/res1" 1263 | type: "Eltwise" 1264 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv3_1x1" 1265 | bottom: "AttentionA_1/mask/down_sample/res1_1" 1266 | top: "AttentionA_1/mask/skip/res1" 1267 | eltwise_param { 1268 | operation: SUM 1269 | } 1270 | } 1271 | 1272 | layer{ 1273 | name: "AttentionA_1/mask/up_sample1" 1274 | type: "Eltwise" 1275 | bottom: "AttentionA_1/mask/skip/res1" 1276 | bottom: "AttentionA_1/mask/up_sample/interp_2" 1277 | top: "AttentionA_1/mask/up_sample1" 1278 | eltwise_param { 1279 | operation: SUM 1280 | } 1281 | } 1282 | 1283 | layer{ 1284 | name: "AttentionA_1/mask/up_sample1/bn" 1285 | type: "BN" 1286 | bottom: "AttentionA_1/mask/up_sample1" 1287 | top: "AttentionA_1/mask/up_sample1/bn" 1288 | bn_param { 1289 | frozen: true 1290 | } 1291 | } 1292 | 1293 | layer{ 1294 | name: "AttentionA_1/mask/up_sample1/bn/relu" 1295 | type: "ReLU" 1296 | bottom: "AttentionA_1/mask/up_sample1/bn" 1297 | top: "AttentionA_1/mask/up_sample1/bn" 1298 | } 1299 | 1300 | layer{ 1301 | name: 
"AttentionA_1/mask/up_sample/res1_1/branch1/conv1_1x1" 1302 | type: "Convolution" 1303 | bottom: "AttentionA_1/mask/up_sample1/bn" 1304 | top: "AttentionA_1/mask/up_sample/res1_1/branch1/conv1_1x1" 1305 | convolution_param { 1306 | num_output: 64 1307 | pad: 0 1308 | kernel_size: 1 1309 | stride: 1 1310 | bias_term: false 1311 | } 1312 | } 1313 | 1314 | layer{ 1315 | name: "AttentionA_1/mask/up_sample/res1_1/branch1/conv1_1x1/bn" 1316 | type: "BN" 1317 | bottom: "AttentionA_1/mask/up_sample/res1_1/branch1/conv1_1x1" 1318 | top: "AttentionA_1/mask/up_sample/res1_1/branch1/conv1_1x1/bn" 1319 | bn_param { 1320 | frozen: true 1321 | } 1322 | } 1323 | 1324 | layer{ 1325 | name: "AttentionA_1/mask/up_sample/res1_1/branch1/conv1_1x1/bn/relu" 1326 | type: "ReLU" 1327 | bottom: "AttentionA_1/mask/up_sample/res1_1/branch1/conv1_1x1/bn" 1328 | top: "AttentionA_1/mask/up_sample/res1_1/branch1/conv1_1x1/bn" 1329 | } 1330 | 1331 | layer{ 1332 | name: "AttentionA_1/mask/up_sample/res1_1/branch1/conv2_3x3" 1333 | type: "Convolution" 1334 | bottom: "AttentionA_1/mask/up_sample/res1_1/branch1/conv1_1x1/bn" 1335 | top: "AttentionA_1/mask/up_sample/res1_1/branch1/conv2_3x3" 1336 | convolution_param { 1337 | num_output: 64 1338 | pad: 1 1339 | kernel_size: 3 1340 | stride: 1 1341 | bias_term: false 1342 | } 1343 | } 1344 | 1345 | layer{ 1346 | name: "AttentionA_1/mask/up_sample/res1_1/branch1/conv2_3x3/bn" 1347 | type: "BN" 1348 | bottom: "AttentionA_1/mask/up_sample/res1_1/branch1/conv2_3x3" 1349 | top: "AttentionA_1/mask/up_sample/res1_1/branch1/conv2_3x3/bn" 1350 | bn_param { 1351 | frozen: true 1352 | } 1353 | } 1354 | 1355 | layer{ 1356 | name: "AttentionA_1/mask/up_sample/res1_1/branch1/conv2_3x3/bn/relu" 1357 | type: "ReLU" 1358 | bottom: "AttentionA_1/mask/up_sample/res1_1/branch1/conv2_3x3/bn" 1359 | top: "AttentionA_1/mask/up_sample/res1_1/branch1/conv2_3x3/bn" 1360 | } 1361 | 1362 | layer{ 1363 | name: "AttentionA_1/mask/up_sample/res1_1/branch1/conv3_1x1" 1364 | type: "Convolution" 1365 | bottom: "AttentionA_1/mask/up_sample/res1_1/branch1/conv2_3x3/bn" 1366 | top: "AttentionA_1/mask/up_sample/res1_1/branch1/conv3_1x1" 1367 | convolution_param { 1368 | num_output: 256 1369 | pad: 0 1370 | kernel_size: 1 1371 | stride: 1 1372 | bias_term: false 1373 | } 1374 | } 1375 | 1376 | layer{ 1377 | name: "AttentionA_1/mask/up_sample/res1_1" 1378 | type: "Eltwise" 1379 | bottom: "AttentionA_1/mask/up_sample/res1_1/branch1/conv3_1x1" 1380 | bottom: "AttentionA_1/mask/up_sample1" 1381 | top: "AttentionA_1/mask/up_sample/res1_1" 1382 | eltwise_param { 1383 | operation: SUM 1384 | } 1385 | } 1386 | 1387 | layer{ 1388 | name: "AttentionA_1/mask/up_sample/interp_1" 1389 | type: "Interp" 1390 | bottom: "AttentionA_1/mask/up_sample/res1_1" 1391 | bottom: "AttentionA_1/trunk/res3" 1392 | top: "AttentionA_1/mask/up_sample/interp_1" 1393 | } 1394 | 1395 | layer{ 1396 | name: "AttentionA_1/mask/up_sample/interp_1/bn" 1397 | type: "BN" 1398 | bottom: "AttentionA_1/mask/up_sample/interp_1" 1399 | top: "AttentionA_1/mask/up_sample/interp_1/bn" 1400 | bn_param { 1401 | frozen: true 1402 | } 1403 | } 1404 | 1405 | layer{ 1406 | name: "AttentionA_1/mask/up_sample/interp_1/bn/relu" 1407 | type: "ReLU" 1408 | bottom: "AttentionA_1/mask/up_sample/interp_1/bn" 1409 | top: "AttentionA_1/mask/up_sample/interp_1/bn" 1410 | } 1411 | 1412 | layer{ 1413 | name: "AttentionA_1/mask/linear_1" 1414 | type: "Convolution" 1415 | bottom: "AttentionA_1/mask/up_sample/interp_1/bn" 1416 | top: "AttentionA_1/mask/linear_1" 1417 | convolution_param 
{ 1418 | num_output: 256 1419 | pad: 0 1420 | kernel_size: 1 1421 | stride: 1 1422 | bias_term: false 1423 | } 1424 | } 1425 | 1426 | layer{ 1427 | name: "AttentionA_1/mask/linear_1/bn" 1428 | type: "BN" 1429 | bottom: "AttentionA_1/mask/linear_1" 1430 | top: "AttentionA_1/mask/linear_1/bn" 1431 | bn_param { 1432 | frozen: true 1433 | } 1434 | } 1435 | 1436 | layer{ 1437 | name: "AttentionA_1/mask/linear_1/bn/relu" 1438 | type: "ReLU" 1439 | bottom: "AttentionA_1/mask/linear_1/bn" 1440 | top: "AttentionA_1/mask/linear_1/bn" 1441 | } 1442 | 1443 | layer{ 1444 | name: "AttentionA_1/mask/linear_2" 1445 | type: "Convolution" 1446 | bottom: "AttentionA_1/mask/linear_1/bn" 1447 | top: "AttentionA_1/mask/linear_2" 1448 | convolution_param { 1449 | num_output: 256 1450 | pad: 0 1451 | kernel_size: 1 1452 | stride: 1 1453 | bias_term: false 1454 | } 1455 | } 1456 | 1457 | layer{ 1458 | name: "AttentionA_1/mask" 1459 | type: "Sigmoid" 1460 | bottom: "AttentionA_1/mask/linear_2" 1461 | top: "AttentionA_1/mask" 1462 | } 1463 | 1464 | layer{ 1465 | name: "AttentionA_1_residual" 1466 | type: "Eltwise" 1467 | bottom: "AttentionA_1/trunk/res3" 1468 | bottom: "AttentionA_1/mask" 1469 | top: "AttentionA_1_residual" 1470 | eltwise_param { 1471 | operation: PROD 1472 | } 1473 | } 1474 | 1475 | layer{ 1476 | name: "AttentionA_1/fusion" 1477 | type: "Eltwise" 1478 | bottom: "AttentionA_1_residual" 1479 | bottom: "AttentionA_1/trunk/res3" 1480 | top: "AttentionA_1/fusion" 1481 | eltwise_param { 1482 | operation: SUM 1483 | } 1484 | } 1485 | 1486 | layer{ 1487 | name: "AttentionA_1/fusion/bn" 1488 | type: "BN" 1489 | bottom: "AttentionA_1/fusion" 1490 | top: "AttentionA_1/fusion/bn" 1491 | bn_param { 1492 | frozen: true 1493 | } 1494 | } 1495 | 1496 | layer{ 1497 | name: "AttentionA_1/fusion/bn/relu" 1498 | type: "ReLU" 1499 | bottom: "AttentionA_1/fusion/bn" 1500 | top: "AttentionA_1/fusion/bn" 1501 | } 1502 | 1503 | layer{ 1504 | name: "AttentionA_1/branch1/conv1_1x1" 1505 | type: "Convolution" 1506 | bottom: "AttentionA_1/fusion/bn" 1507 | top: "AttentionA_1/branch1/conv1_1x1" 1508 | convolution_param { 1509 | num_output: 64 1510 | pad: 0 1511 | kernel_size: 1 1512 | stride: 1 1513 | bias_term: false 1514 | } 1515 | } 1516 | 1517 | layer{ 1518 | name: "AttentionA_1/branch1/conv1_1x1/bn" 1519 | type: "BN" 1520 | bottom: "AttentionA_1/branch1/conv1_1x1" 1521 | top: "AttentionA_1/branch1/conv1_1x1/bn" 1522 | bn_param { 1523 | frozen: true 1524 | } 1525 | } 1526 | 1527 | layer{ 1528 | name: "AttentionA_1/branch1/conv1_1x1/bn/relu" 1529 | type: "ReLU" 1530 | bottom: "AttentionA_1/branch1/conv1_1x1/bn" 1531 | top: "AttentionA_1/branch1/conv1_1x1/bn" 1532 | } 1533 | 1534 | layer{ 1535 | name: "AttentionA_1/branch1/conv2_3x3" 1536 | type: "Convolution" 1537 | bottom: "AttentionA_1/branch1/conv1_1x1/bn" 1538 | top: "AttentionA_1/branch1/conv2_3x3" 1539 | convolution_param { 1540 | num_output: 64 1541 | pad: 1 1542 | kernel_size: 3 1543 | stride: 1 1544 | bias_term: false 1545 | } 1546 | } 1547 | 1548 | layer{ 1549 | name: "AttentionA_1/branch1/conv2_3x3/bn" 1550 | type: "BN" 1551 | bottom: "AttentionA_1/branch1/conv2_3x3" 1552 | top: "AttentionA_1/branch1/conv2_3x3/bn" 1553 | bn_param { 1554 | frozen: true 1555 | } 1556 | } 1557 | 1558 | layer{ 1559 | name: "AttentionA_1/branch1/conv2_3x3/bn/relu" 1560 | type: "ReLU" 1561 | bottom: "AttentionA_1/branch1/conv2_3x3/bn" 1562 | top: "AttentionA_1/branch1/conv2_3x3/bn" 1563 | } 1564 | 1565 | layer{ 1566 | name: "AttentionA_1/branch1/conv3_1x1" 1567 | type: "Convolution" 
1568 | bottom: "AttentionA_1/branch1/conv2_3x3/bn" 1569 | top: "AttentionA_1/branch1/conv3_1x1" 1570 | convolution_param { 1571 | num_output: 256 1572 | pad: 0 1573 | kernel_size: 1 1574 | stride: 1 1575 | bias_term: false 1576 | } 1577 | } 1578 | 1579 | layer{ 1580 | name: "AttentionA_1" 1581 | type: "Eltwise" 1582 | bottom: "AttentionA_1/branch1/conv3_1x1" 1583 | bottom: "AttentionA_1/fusion" 1584 | top: "AttentionA_1" 1585 | eltwise_param { 1586 | operation: SUM 1587 | } 1588 | } 1589 | 1590 | layer{ 1591 | name: "AttentionA_1/bn" 1592 | type: "BN" 1593 | bottom: "AttentionA_1" 1594 | top: "AttentionA_1/bn" 1595 | bn_param { 1596 | frozen: true 1597 | } 1598 | } 1599 | 1600 | layer{ 1601 | name: "AttentionA_1/bn/relu" 1602 | type: "ReLU" 1603 | bottom: "AttentionA_1/bn" 1604 | top: "AttentionA_1/bn" 1605 | } 1606 | 1607 | layer{ 1608 | name: "pre_res_2/branch1/conv1_1x1" 1609 | type: "Convolution" 1610 | bottom: "AttentionA_1/bn" 1611 | top: "pre_res_2/branch1/conv1_1x1" 1612 | convolution_param { 1613 | num_output: 128 1614 | pad: 0 1615 | kernel_size: 1 1616 | stride: 1 1617 | bias_term: false 1618 | } 1619 | } 1620 | 1621 | layer{ 1622 | name: "pre_res_2/branch1/conv1_1x1/bn" 1623 | type: "BN" 1624 | bottom: "pre_res_2/branch1/conv1_1x1" 1625 | top: "pre_res_2/branch1/conv1_1x1/bn" 1626 | bn_param { 1627 | frozen: true 1628 | } 1629 | } 1630 | 1631 | layer{ 1632 | name: "pre_res_2/branch1/conv1_1x1/bn/relu" 1633 | type: "ReLU" 1634 | bottom: "pre_res_2/branch1/conv1_1x1/bn" 1635 | top: "pre_res_2/branch1/conv1_1x1/bn" 1636 | } 1637 | 1638 | layer{ 1639 | name: "pre_res_2/branch1/conv2_3x3" 1640 | type: "Convolution" 1641 | bottom: "pre_res_2/branch1/conv1_1x1/bn" 1642 | top: "pre_res_2/branch1/conv2_3x3" 1643 | convolution_param { 1644 | num_output: 128 1645 | pad: 1 1646 | kernel_size: 3 1647 | stride: 2 1648 | bias_term: false 1649 | } 1650 | } 1651 | 1652 | layer{ 1653 | name: "pre_res_2/branch1/conv2_3x3/bn" 1654 | type: "BN" 1655 | bottom: "pre_res_2/branch1/conv2_3x3" 1656 | top: "pre_res_2/branch1/conv2_3x3/bn" 1657 | bn_param { 1658 | frozen: true 1659 | } 1660 | } 1661 | 1662 | layer{ 1663 | name: "pre_res_2/branch1/conv2_3x3/bn/relu" 1664 | type: "ReLU" 1665 | bottom: "pre_res_2/branch1/conv2_3x3/bn" 1666 | top: "pre_res_2/branch1/conv2_3x3/bn" 1667 | } 1668 | 1669 | layer{ 1670 | name: "pre_res_2/branch1/conv3_1x1" 1671 | type: "Convolution" 1672 | bottom: "pre_res_2/branch1/conv2_3x3/bn" 1673 | top: "pre_res_2/branch1/conv3_1x1" 1674 | convolution_param { 1675 | num_output: 512 1676 | pad: 0 1677 | kernel_size: 1 1678 | stride: 1 1679 | bias_term: false 1680 | } 1681 | } 1682 | 1683 | layer{ 1684 | name: "pre_res_2/branch2/conv1_1x1" 1685 | type: "Convolution" 1686 | bottom: "AttentionA_1/bn" 1687 | top: "pre_res_2/branch2/conv1_1x1" 1688 | convolution_param { 1689 | num_output: 512 1690 | pad: 0 1691 | kernel_size: 1 1692 | stride: 2 1693 | bias_term: false 1694 | } 1695 | } 1696 | 1697 | layer{ 1698 | name: "pre_res_2" 1699 | type: "Eltwise" 1700 | bottom: "pre_res_2/branch2/conv1_1x1" 1701 | bottom: "pre_res_2/branch1/conv3_1x1" 1702 | top: "pre_res_2" 1703 | eltwise_param { 1704 | operation: SUM 1705 | } 1706 | } 1707 | 1708 | layer{ 1709 | name: "pre_res_2/bn" 1710 | type: "BN" 1711 | bottom: "pre_res_2" 1712 | top: "pre_res_2/bn" 1713 | bn_param { 1714 | frozen: true 1715 | } 1716 | } 1717 | 1718 | layer{ 1719 | name: "pre_res_2/bn/relu" 1720 | type: "ReLU" 1721 | bottom: "pre_res_2/bn" 1722 | top: "pre_res_2/bn" 1723 | } 1724 | 1725 | layer{ 1726 | name: 
"AttentionB_1/trunk/res1/branch1/conv1_1x1" 1727 | type: "Convolution" 1728 | bottom: "pre_res_2/bn" 1729 | top: "AttentionB_1/trunk/res1/branch1/conv1_1x1" 1730 | convolution_param { 1731 | num_output: 128 1732 | pad: 0 1733 | kernel_size: 1 1734 | stride: 1 1735 | bias_term: false 1736 | } 1737 | } 1738 | 1739 | layer{ 1740 | name: "AttentionB_1/trunk/res1/branch1/conv1_1x1/bn" 1741 | type: "BN" 1742 | bottom: "AttentionB_1/trunk/res1/branch1/conv1_1x1" 1743 | top: "AttentionB_1/trunk/res1/branch1/conv1_1x1/bn" 1744 | bn_param { 1745 | frozen: true 1746 | } 1747 | } 1748 | 1749 | layer{ 1750 | name: "AttentionB_1/trunk/res1/branch1/conv1_1x1/bn/relu" 1751 | type: "ReLU" 1752 | bottom: "AttentionB_1/trunk/res1/branch1/conv1_1x1/bn" 1753 | top: "AttentionB_1/trunk/res1/branch1/conv1_1x1/bn" 1754 | } 1755 | 1756 | layer{ 1757 | name: "AttentionB_1/trunk/res1/branch1/conv2_3x3" 1758 | type: "Convolution" 1759 | bottom: "AttentionB_1/trunk/res1/branch1/conv1_1x1/bn" 1760 | top: "AttentionB_1/trunk/res1/branch1/conv2_3x3" 1761 | convolution_param { 1762 | num_output: 128 1763 | pad: 1 1764 | kernel_size: 3 1765 | stride: 1 1766 | bias_term: false 1767 | } 1768 | } 1769 | 1770 | layer{ 1771 | name: "AttentionB_1/trunk/res1/branch1/conv2_3x3/bn" 1772 | type: "BN" 1773 | bottom: "AttentionB_1/trunk/res1/branch1/conv2_3x3" 1774 | top: "AttentionB_1/trunk/res1/branch1/conv2_3x3/bn" 1775 | bn_param { 1776 | frozen: true 1777 | } 1778 | } 1779 | 1780 | layer{ 1781 | name: "AttentionB_1/trunk/res1/branch1/conv2_3x3/bn/relu" 1782 | type: "ReLU" 1783 | bottom: "AttentionB_1/trunk/res1/branch1/conv2_3x3/bn" 1784 | top: "AttentionB_1/trunk/res1/branch1/conv2_3x3/bn" 1785 | } 1786 | 1787 | layer{ 1788 | name: "AttentionB_1/trunk/res1/branch1/conv3_1x1" 1789 | type: "Convolution" 1790 | bottom: "AttentionB_1/trunk/res1/branch1/conv2_3x3/bn" 1791 | top: "AttentionB_1/trunk/res1/branch1/conv3_1x1" 1792 | convolution_param { 1793 | num_output: 512 1794 | pad: 0 1795 | kernel_size: 1 1796 | stride: 1 1797 | bias_term: false 1798 | } 1799 | } 1800 | 1801 | layer{ 1802 | name: "AttentionB_1/trunk/res1" 1803 | type: "Eltwise" 1804 | bottom: "AttentionB_1/trunk/res1/branch1/conv3_1x1" 1805 | bottom: "pre_res_2" 1806 | top: "AttentionB_1/trunk/res1" 1807 | eltwise_param { 1808 | operation: SUM 1809 | } 1810 | } 1811 | 1812 | layer{ 1813 | name: "AttentionB_1/trunk/res1/bn" 1814 | type: "BN" 1815 | bottom: "AttentionB_1/trunk/res1" 1816 | top: "AttentionB_1/trunk/res1/bn" 1817 | bn_param { 1818 | frozen: true 1819 | } 1820 | } 1821 | 1822 | layer{ 1823 | name: "AttentionB_1/trunk/res1/bn/relu" 1824 | type: "ReLU" 1825 | bottom: "AttentionB_1/trunk/res1/bn" 1826 | top: "AttentionB_1/trunk/res1/bn" 1827 | } 1828 | 1829 | layer{ 1830 | name: "AttentionB_1/trunk/res2/branch1/conv1_1x1" 1831 | type: "Convolution" 1832 | bottom: "AttentionB_1/trunk/res1/bn" 1833 | top: "AttentionB_1/trunk/res2/branch1/conv1_1x1" 1834 | convolution_param { 1835 | num_output: 128 1836 | pad: 0 1837 | kernel_size: 1 1838 | stride: 1 1839 | bias_term: false 1840 | } 1841 | } 1842 | 1843 | layer{ 1844 | name: "AttentionB_1/trunk/res2/branch1/conv1_1x1/bn" 1845 | type: "BN" 1846 | bottom: "AttentionB_1/trunk/res2/branch1/conv1_1x1" 1847 | top: "AttentionB_1/trunk/res2/branch1/conv1_1x1/bn" 1848 | bn_param { 1849 | frozen: true 1850 | } 1851 | } 1852 | 1853 | layer{ 1854 | name: "AttentionB_1/trunk/res2/branch1/conv1_1x1/bn/relu" 1855 | type: "ReLU" 1856 | bottom: "AttentionB_1/trunk/res2/branch1/conv1_1x1/bn" 1857 | top: 
"AttentionB_1/trunk/res2/branch1/conv1_1x1/bn" 1858 | } 1859 | 1860 | layer{ 1861 | name: "AttentionB_1/trunk/res2/branch1/conv2_3x3" 1862 | type: "Convolution" 1863 | bottom: "AttentionB_1/trunk/res2/branch1/conv1_1x1/bn" 1864 | top: "AttentionB_1/trunk/res2/branch1/conv2_3x3" 1865 | convolution_param { 1866 | num_output: 128 1867 | pad: 1 1868 | kernel_size: 3 1869 | stride: 1 1870 | bias_term: false 1871 | } 1872 | } 1873 | 1874 | layer{ 1875 | name: "AttentionB_1/trunk/res2/branch1/conv2_3x3/bn" 1876 | type: "BN" 1877 | bottom: "AttentionB_1/trunk/res2/branch1/conv2_3x3" 1878 | top: "AttentionB_1/trunk/res2/branch1/conv2_3x3/bn" 1879 | bn_param { 1880 | frozen: true 1881 | } 1882 | } 1883 | 1884 | layer{ 1885 | name: "AttentionB_1/trunk/res2/branch1/conv2_3x3/bn/relu" 1886 | type: "ReLU" 1887 | bottom: "AttentionB_1/trunk/res2/branch1/conv2_3x3/bn" 1888 | top: "AttentionB_1/trunk/res2/branch1/conv2_3x3/bn" 1889 | } 1890 | 1891 | layer{ 1892 | name: "AttentionB_1/trunk/res2/branch1/conv3_1x1" 1893 | type: "Convolution" 1894 | bottom: "AttentionB_1/trunk/res2/branch1/conv2_3x3/bn" 1895 | top: "AttentionB_1/trunk/res2/branch1/conv3_1x1" 1896 | convolution_param { 1897 | num_output: 512 1898 | pad: 0 1899 | kernel_size: 1 1900 | stride: 1 1901 | bias_term: false 1902 | } 1903 | } 1904 | 1905 | layer{ 1906 | name: "AttentionB_1/trunk/res2" 1907 | type: "Eltwise" 1908 | bottom: "AttentionB_1/trunk/res2/branch1/conv3_1x1" 1909 | bottom: "AttentionB_1/trunk/res1" 1910 | top: "AttentionB_1/trunk/res2" 1911 | eltwise_param { 1912 | operation: SUM 1913 | } 1914 | } 1915 | 1916 | layer{ 1917 | name: "AttentionB_1/trunk/res2/bn" 1918 | type: "BN" 1919 | bottom: "AttentionB_1/trunk/res2" 1920 | top: "AttentionB_1/trunk/res2/bn" 1921 | bn_param { 1922 | frozen: true 1923 | } 1924 | } 1925 | 1926 | layer{ 1927 | name: "AttentionB_1/trunk/res2/bn/relu" 1928 | type: "ReLU" 1929 | bottom: "AttentionB_1/trunk/res2/bn" 1930 | top: "AttentionB_1/trunk/res2/bn" 1931 | } 1932 | 1933 | layer{ 1934 | name: "AttentionB_1/trunk/res3/branch1/conv1_1x1" 1935 | type: "Convolution" 1936 | bottom: "AttentionB_1/trunk/res2/bn" 1937 | top: "AttentionB_1/trunk/res3/branch1/conv1_1x1" 1938 | convolution_param { 1939 | num_output: 128 1940 | pad: 0 1941 | kernel_size: 1 1942 | stride: 1 1943 | bias_term: false 1944 | } 1945 | } 1946 | 1947 | layer{ 1948 | name: "AttentionB_1/trunk/res3/branch1/conv1_1x1/bn" 1949 | type: "BN" 1950 | bottom: "AttentionB_1/trunk/res3/branch1/conv1_1x1" 1951 | top: "AttentionB_1/trunk/res3/branch1/conv1_1x1/bn" 1952 | bn_param { 1953 | frozen: true 1954 | } 1955 | } 1956 | 1957 | layer{ 1958 | name: "AttentionB_1/trunk/res3/branch1/conv1_1x1/bn/relu" 1959 | type: "ReLU" 1960 | bottom: "AttentionB_1/trunk/res3/branch1/conv1_1x1/bn" 1961 | top: "AttentionB_1/trunk/res3/branch1/conv1_1x1/bn" 1962 | } 1963 | 1964 | layer{ 1965 | name: "AttentionB_1/trunk/res3/branch1/conv2_3x3" 1966 | type: "Convolution" 1967 | bottom: "AttentionB_1/trunk/res3/branch1/conv1_1x1/bn" 1968 | top: "AttentionB_1/trunk/res3/branch1/conv2_3x3" 1969 | convolution_param { 1970 | num_output: 128 1971 | pad: 1 1972 | kernel_size: 3 1973 | stride: 1 1974 | bias_term: false 1975 | } 1976 | } 1977 | 1978 | layer{ 1979 | name: "AttentionB_1/trunk/res3/branch1/conv2_3x3/bn" 1980 | type: "BN" 1981 | bottom: "AttentionB_1/trunk/res3/branch1/conv2_3x3" 1982 | top: "AttentionB_1/trunk/res3/branch1/conv2_3x3/bn" 1983 | bn_param { 1984 | frozen: true 1985 | } 1986 | } 1987 | 1988 | layer{ 1989 | name: 
"AttentionB_1/trunk/res3/branch1/conv2_3x3/bn/relu" 1990 | type: "ReLU" 1991 | bottom: "AttentionB_1/trunk/res3/branch1/conv2_3x3/bn" 1992 | top: "AttentionB_1/trunk/res3/branch1/conv2_3x3/bn" 1993 | } 1994 | 1995 | layer{ 1996 | name: "AttentionB_1/trunk/res3/branch1/conv3_1x1" 1997 | type: "Convolution" 1998 | bottom: "AttentionB_1/trunk/res3/branch1/conv2_3x3/bn" 1999 | top: "AttentionB_1/trunk/res3/branch1/conv3_1x1" 2000 | convolution_param { 2001 | num_output: 512 2002 | pad: 0 2003 | kernel_size: 1 2004 | stride: 1 2005 | bias_term: false 2006 | } 2007 | } 2008 | 2009 | layer{ 2010 | name: "AttentionB_1/trunk/res3" 2011 | type: "Eltwise" 2012 | bottom: "AttentionB_1/trunk/res3/branch1/conv3_1x1" 2013 | bottom: "AttentionB_1/trunk/res2" 2014 | top: "AttentionB_1/trunk/res3" 2015 | eltwise_param { 2016 | operation: SUM 2017 | } 2018 | } 2019 | 2020 | layer{ 2021 | name: "AttentionB_1/mask/down_sample/pool1_3x3_s2" 2022 | type: "Pooling" 2023 | bottom: "AttentionB_1/trunk/res1" 2024 | top: "AttentionB_1/mask/down_sample/pool1_3x3_s2" 2025 | pooling_param { 2026 | pool: MAX 2027 | kernel_size: 3 2028 | stride: 2 2029 | } 2030 | } 2031 | 2032 | layer{ 2033 | name: "AttentionB_1/mask/down_sample/pool1_3x3_s2/bn" 2034 | type: "BN" 2035 | bottom: "AttentionB_1/mask/down_sample/pool1_3x3_s2" 2036 | top: "AttentionB_1/mask/down_sample/pool1_3x3_s2/bn" 2037 | bn_param { 2038 | frozen: true 2039 | } 2040 | } 2041 | 2042 | layer{ 2043 | name: "AttentionB_1/mask/down_sample/pool1_3x3_s2/bn/relu" 2044 | type: "ReLU" 2045 | bottom: "AttentionB_1/mask/down_sample/pool1_3x3_s2/bn" 2046 | top: "AttentionB_1/mask/down_sample/pool1_3x3_s2/bn" 2047 | } 2048 | 2049 | layer{ 2050 | name: "AttentionB_1/mask/down_sample/res1_1/branch1/conv1_1x1" 2051 | type: "Convolution" 2052 | bottom: "AttentionB_1/mask/down_sample/pool1_3x3_s2/bn" 2053 | top: "AttentionB_1/mask/down_sample/res1_1/branch1/conv1_1x1" 2054 | convolution_param { 2055 | num_output: 128 2056 | pad: 0 2057 | kernel_size: 1 2058 | stride: 1 2059 | bias_term: false 2060 | } 2061 | } 2062 | 2063 | layer{ 2064 | name: "AttentionB_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 2065 | type: "BN" 2066 | bottom: "AttentionB_1/mask/down_sample/res1_1/branch1/conv1_1x1" 2067 | top: "AttentionB_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 2068 | bn_param { 2069 | frozen: true 2070 | } 2071 | } 2072 | 2073 | layer{ 2074 | name: "AttentionB_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn/relu" 2075 | type: "ReLU" 2076 | bottom: "AttentionB_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 2077 | top: "AttentionB_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 2078 | } 2079 | 2080 | layer{ 2081 | name: "AttentionB_1/mask/down_sample/res1_1/branch1/conv2_3x3" 2082 | type: "Convolution" 2083 | bottom: "AttentionB_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 2084 | top: "AttentionB_1/mask/down_sample/res1_1/branch1/conv2_3x3" 2085 | convolution_param { 2086 | num_output: 128 2087 | pad: 1 2088 | kernel_size: 3 2089 | stride: 1 2090 | bias_term: false 2091 | } 2092 | } 2093 | 2094 | layer{ 2095 | name: "AttentionB_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 2096 | type: "BN" 2097 | bottom: "AttentionB_1/mask/down_sample/res1_1/branch1/conv2_3x3" 2098 | top: "AttentionB_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 2099 | bn_param { 2100 | frozen: true 2101 | } 2102 | } 2103 | 2104 | layer{ 2105 | name: "AttentionB_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn/relu" 2106 | type: "ReLU" 2107 | bottom: 
"AttentionB_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 2108 | top: "AttentionB_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 2109 | } 2110 | 2111 | layer{ 2112 | name: "AttentionB_1/mask/down_sample/res1_1/branch1/conv3_1x1" 2113 | type: "Convolution" 2114 | bottom: "AttentionB_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 2115 | top: "AttentionB_1/mask/down_sample/res1_1/branch1/conv3_1x1" 2116 | convolution_param { 2117 | num_output: 512 2118 | pad: 0 2119 | kernel_size: 1 2120 | stride: 1 2121 | bias_term: false 2122 | } 2123 | } 2124 | 2125 | layer{ 2126 | name: "AttentionB_1/mask/down_sample/res1_1" 2127 | type: "Eltwise" 2128 | bottom: "AttentionB_1/mask/down_sample/res1_1/branch1/conv3_1x1" 2129 | bottom: "AttentionB_1/mask/down_sample/pool1_3x3_s2" 2130 | top: "AttentionB_1/mask/down_sample/res1_1" 2131 | eltwise_param { 2132 | operation: SUM 2133 | } 2134 | } 2135 | 2136 | layer{ 2137 | name: "AttentionB_1/mask/down_sample/pool2_3x3_s2" 2138 | type: "Pooling" 2139 | bottom: "AttentionB_1/mask/down_sample/res1_1" 2140 | top: "AttentionB_1/mask/down_sample/pool2_3x3_s2" 2141 | pooling_param { 2142 | pool: MAX 2143 | kernel_size: 3 2144 | stride: 2 2145 | } 2146 | } 2147 | 2148 | layer{ 2149 | name: "AttentionB_1/mask/down_sample/pool2_3x3_s2/bn" 2150 | type: "BN" 2151 | bottom: "AttentionB_1/mask/down_sample/pool2_3x3_s2" 2152 | top: "AttentionB_1/mask/down_sample/pool2_3x3_s2/bn" 2153 | bn_param { 2154 | frozen: true 2155 | } 2156 | } 2157 | 2158 | layer{ 2159 | name: "AttentionB_1/mask/down_sample/pool2_3x3_s2/bn/relu" 2160 | type: "ReLU" 2161 | bottom: "AttentionB_1/mask/down_sample/pool2_3x3_s2/bn" 2162 | top: "AttentionB_1/mask/down_sample/pool2_3x3_s2/bn" 2163 | } 2164 | 2165 | layer{ 2166 | name: "AttentionB_1/mask/down_sample/res2_1/branch1/conv1_1x1" 2167 | type: "Convolution" 2168 | bottom: "AttentionB_1/mask/down_sample/pool2_3x3_s2/bn" 2169 | top: "AttentionB_1/mask/down_sample/res2_1/branch1/conv1_1x1" 2170 | convolution_param { 2171 | num_output: 128 2172 | pad: 0 2173 | kernel_size: 1 2174 | stride: 1 2175 | bias_term: false 2176 | } 2177 | } 2178 | 2179 | layer{ 2180 | name: "AttentionB_1/mask/down_sample/res2_1/branch1/conv1_1x1/bn" 2181 | type: "BN" 2182 | bottom: "AttentionB_1/mask/down_sample/res2_1/branch1/conv1_1x1" 2183 | top: "AttentionB_1/mask/down_sample/res2_1/branch1/conv1_1x1/bn" 2184 | bn_param { 2185 | frozen: true 2186 | } 2187 | } 2188 | 2189 | layer{ 2190 | name: "AttentionB_1/mask/down_sample/res2_1/branch1/conv1_1x1/bn/relu" 2191 | type: "ReLU" 2192 | bottom: "AttentionB_1/mask/down_sample/res2_1/branch1/conv1_1x1/bn" 2193 | top: "AttentionB_1/mask/down_sample/res2_1/branch1/conv1_1x1/bn" 2194 | } 2195 | 2196 | layer{ 2197 | name: "AttentionB_1/mask/down_sample/res2_1/branch1/conv2_3x3" 2198 | type: "Convolution" 2199 | bottom: "AttentionB_1/mask/down_sample/res2_1/branch1/conv1_1x1/bn" 2200 | top: "AttentionB_1/mask/down_sample/res2_1/branch1/conv2_3x3" 2201 | convolution_param { 2202 | num_output: 128 2203 | pad: 1 2204 | kernel_size: 3 2205 | stride: 1 2206 | bias_term: false 2207 | } 2208 | } 2209 | 2210 | layer{ 2211 | name: "AttentionB_1/mask/down_sample/res2_1/branch1/conv2_3x3/bn" 2212 | type: "BN" 2213 | bottom: "AttentionB_1/mask/down_sample/res2_1/branch1/conv2_3x3" 2214 | top: "AttentionB_1/mask/down_sample/res2_1/branch1/conv2_3x3/bn" 2215 | bn_param { 2216 | frozen: true 2217 | } 2218 | } 2219 | 2220 | layer{ 2221 | name: "AttentionB_1/mask/down_sample/res2_1/branch1/conv2_3x3/bn/relu" 2222 | type: "ReLU" 2223 | bottom: 
"AttentionB_1/mask/down_sample/res2_1/branch1/conv2_3x3/bn" 2224 | top: "AttentionB_1/mask/down_sample/res2_1/branch1/conv2_3x3/bn" 2225 | } 2226 | 2227 | layer{ 2228 | name: "AttentionB_1/mask/down_sample/res2_1/branch1/conv3_1x1" 2229 | type: "Convolution" 2230 | bottom: "AttentionB_1/mask/down_sample/res2_1/branch1/conv2_3x3/bn" 2231 | top: "AttentionB_1/mask/down_sample/res2_1/branch1/conv3_1x1" 2232 | convolution_param { 2233 | num_output: 512 2234 | pad: 0 2235 | kernel_size: 1 2236 | stride: 1 2237 | bias_term: false 2238 | } 2239 | } 2240 | 2241 | layer{ 2242 | name: "AttentionB_1/mask/down_sample/res2_1" 2243 | type: "Eltwise" 2244 | bottom: "AttentionB_1/mask/down_sample/res2_1/branch1/conv3_1x1" 2245 | bottom: "AttentionB_1/mask/down_sample/pool2_3x3_s2" 2246 | top: "AttentionB_1/mask/down_sample/res2_1" 2247 | eltwise_param { 2248 | operation: SUM 2249 | } 2250 | } 2251 | 2252 | layer{ 2253 | name: "AttentionB_1/mask/down_sample/res2_1/bn" 2254 | type: "BN" 2255 | bottom: "AttentionB_1/mask/down_sample/res2_1" 2256 | top: "AttentionB_1/mask/down_sample/res2_1/bn" 2257 | bn_param { 2258 | frozen: true 2259 | } 2260 | } 2261 | 2262 | layer{ 2263 | name: "AttentionB_1/mask/down_sample/res2_1/bn/relu" 2264 | type: "ReLU" 2265 | bottom: "AttentionB_1/mask/down_sample/res2_1/bn" 2266 | top: "AttentionB_1/mask/down_sample/res2_1/bn" 2267 | } 2268 | 2269 | layer{ 2270 | name: "AttentionB_1/mask/down_sample/res2_2/branch1/conv1_1x1" 2271 | type: "Convolution" 2272 | bottom: "AttentionB_1/mask/down_sample/res2_1/bn" 2273 | top: "AttentionB_1/mask/down_sample/res2_2/branch1/conv1_1x1" 2274 | convolution_param { 2275 | num_output: 128 2276 | pad: 0 2277 | kernel_size: 1 2278 | stride: 1 2279 | bias_term: false 2280 | } 2281 | } 2282 | 2283 | layer{ 2284 | name: "AttentionB_1/mask/down_sample/res2_2/branch1/conv1_1x1/bn" 2285 | type: "BN" 2286 | bottom: "AttentionB_1/mask/down_sample/res2_2/branch1/conv1_1x1" 2287 | top: "AttentionB_1/mask/down_sample/res2_2/branch1/conv1_1x1/bn" 2288 | bn_param { 2289 | frozen: true 2290 | } 2291 | } 2292 | 2293 | layer{ 2294 | name: "AttentionB_1/mask/down_sample/res2_2/branch1/conv1_1x1/bn/relu" 2295 | type: "ReLU" 2296 | bottom: "AttentionB_1/mask/down_sample/res2_2/branch1/conv1_1x1/bn" 2297 | top: "AttentionB_1/mask/down_sample/res2_2/branch1/conv1_1x1/bn" 2298 | } 2299 | 2300 | layer{ 2301 | name: "AttentionB_1/mask/down_sample/res2_2/branch1/conv2_3x3" 2302 | type: "Convolution" 2303 | bottom: "AttentionB_1/mask/down_sample/res2_2/branch1/conv1_1x1/bn" 2304 | top: "AttentionB_1/mask/down_sample/res2_2/branch1/conv2_3x3" 2305 | convolution_param { 2306 | num_output: 128 2307 | pad: 1 2308 | kernel_size: 3 2309 | stride: 1 2310 | bias_term: false 2311 | } 2312 | } 2313 | 2314 | layer{ 2315 | name: "AttentionB_1/mask/down_sample/res2_2/branch1/conv2_3x3/bn" 2316 | type: "BN" 2317 | bottom: "AttentionB_1/mask/down_sample/res2_2/branch1/conv2_3x3" 2318 | top: "AttentionB_1/mask/down_sample/res2_2/branch1/conv2_3x3/bn" 2319 | bn_param { 2320 | frozen: true 2321 | } 2322 | } 2323 | 2324 | layer{ 2325 | name: "AttentionB_1/mask/down_sample/res2_2/branch1/conv2_3x3/bn/relu" 2326 | type: "ReLU" 2327 | bottom: "AttentionB_1/mask/down_sample/res2_2/branch1/conv2_3x3/bn" 2328 | top: "AttentionB_1/mask/down_sample/res2_2/branch1/conv2_3x3/bn" 2329 | } 2330 | 2331 | layer{ 2332 | name: "AttentionB_1/mask/down_sample/res2_2/branch1/conv3_1x1" 2333 | type: "Convolution" 2334 | bottom: "AttentionB_1/mask/down_sample/res2_2/branch1/conv2_3x3/bn" 2335 | top: 
"AttentionB_1/mask/down_sample/res2_2/branch1/conv3_1x1" 2336 | convolution_param { 2337 | num_output: 512 2338 | pad: 0 2339 | kernel_size: 1 2340 | stride: 1 2341 | bias_term: false 2342 | } 2343 | } 2344 | 2345 | layer{ 2346 | name: "AttentionB_1/mask/down_sample/res2_2" 2347 | type: "Eltwise" 2348 | bottom: "AttentionB_1/mask/down_sample/res2_2/branch1/conv3_1x1" 2349 | bottom: "AttentionB_1/mask/down_sample/res2_1" 2350 | top: "AttentionB_1/mask/down_sample/res2_2" 2351 | eltwise_param { 2352 | operation: SUM 2353 | } 2354 | } 2355 | 2356 | layer{ 2357 | name: "AttentionB_1/mask/up_sample/interp_2" 2358 | type: "Interp" 2359 | bottom: "AttentionB_1/mask/down_sample/res2_2" 2360 | bottom: "AttentionB_1/mask/down_sample/res1_1" 2361 | top: "AttentionB_1/mask/up_sample/interp_2" 2362 | } 2363 | 2364 | layer{ 2365 | name: "AttentionB_1/mask/down_sample/res1_1/bn" 2366 | type: "BN" 2367 | bottom: "AttentionB_1/mask/down_sample/res1_1" 2368 | top: "AttentionB_1/mask/down_sample/res1_1/bn" 2369 | bn_param { 2370 | frozen: true 2371 | } 2372 | } 2373 | 2374 | layer{ 2375 | name: "AttentionB_1/mask/down_sample/res1_1/bn/relu" 2376 | type: "ReLU" 2377 | bottom: "AttentionB_1/mask/down_sample/res1_1/bn" 2378 | top: "AttentionB_1/mask/down_sample/res1_1/bn" 2379 | } 2380 | 2381 | layer{ 2382 | name: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1" 2383 | type: "Convolution" 2384 | bottom: "AttentionB_1/mask/down_sample/res1_1/bn" 2385 | top: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1" 2386 | convolution_param { 2387 | num_output: 128 2388 | pad: 0 2389 | kernel_size: 1 2390 | stride: 1 2391 | bias_term: false 2392 | } 2393 | } 2394 | 2395 | layer{ 2396 | name: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1/bn" 2397 | type: "BN" 2398 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1" 2399 | top: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1/bn" 2400 | bn_param { 2401 | frozen: true 2402 | } 2403 | } 2404 | 2405 | layer{ 2406 | name: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1/bn/relu" 2407 | type: "ReLU" 2408 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1/bn" 2409 | top: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1/bn" 2410 | } 2411 | 2412 | layer{ 2413 | name: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3" 2414 | type: "Convolution" 2415 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1/bn" 2416 | top: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3" 2417 | convolution_param { 2418 | num_output: 128 2419 | pad: 1 2420 | kernel_size: 3 2421 | stride: 1 2422 | bias_term: false 2423 | } 2424 | } 2425 | 2426 | layer{ 2427 | name: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3/bn" 2428 | type: "BN" 2429 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3" 2430 | top: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3/bn" 2431 | bn_param { 2432 | frozen: true 2433 | } 2434 | } 2435 | 2436 | layer{ 2437 | name: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3/bn/relu" 2438 | type: "ReLU" 2439 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3/bn" 2440 | top: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3/bn" 2441 | } 2442 | 2443 | layer{ 2444 | name: "AttentionB_1/mask/skip/res1/branch1/conv3_1x1" 2445 | type: "Convolution" 2446 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3/bn" 2447 | top: "AttentionB_1/mask/skip/res1/branch1/conv3_1x1" 2448 | convolution_param { 2449 | num_output: 512 2450 | pad: 0 2451 | kernel_size: 1 2452 | stride: 1 2453 | bias_term: false 2454 | } 2455 | } 2456 | 2457 | layer{ 2458 | name: "AttentionB_1/mask/skip/res1" 2459 | type: 
"Eltwise" 2460 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv3_1x1" 2461 | bottom: "AttentionB_1/mask/down_sample/res1_1" 2462 | top: "AttentionB_1/mask/skip/res1" 2463 | eltwise_param { 2464 | operation: SUM 2465 | } 2466 | } 2467 | 2468 | layer{ 2469 | name: "AttentionB_1/mask/up_sample1" 2470 | type: "Eltwise" 2471 | bottom: "AttentionB_1/mask/skip/res1" 2472 | bottom: "AttentionB_1/mask/up_sample/interp_2" 2473 | top: "AttentionB_1/mask/up_sample1" 2474 | eltwise_param { 2475 | operation: SUM 2476 | } 2477 | } 2478 | 2479 | layer{ 2480 | name: "AttentionB_1/mask/up_sample1/bn" 2481 | type: "BN" 2482 | bottom: "AttentionB_1/mask/up_sample1" 2483 | top: "AttentionB_1/mask/up_sample1/bn" 2484 | bn_param { 2485 | frozen: true 2486 | } 2487 | } 2488 | 2489 | layer{ 2490 | name: "AttentionB_1/mask/up_sample1/bn/relu" 2491 | type: "ReLU" 2492 | bottom: "AttentionB_1/mask/up_sample1/bn" 2493 | top: "AttentionB_1/mask/up_sample1/bn" 2494 | } 2495 | 2496 | layer{ 2497 | name: "AttentionB_1/mask/up_sample/res1_1/branch1/conv1_1x1" 2498 | type: "Convolution" 2499 | bottom: "AttentionB_1/mask/up_sample1/bn" 2500 | top: "AttentionB_1/mask/up_sample/res1_1/branch1/conv1_1x1" 2501 | convolution_param { 2502 | num_output: 128 2503 | pad: 0 2504 | kernel_size: 1 2505 | stride: 1 2506 | bias_term: false 2507 | } 2508 | } 2509 | 2510 | layer{ 2511 | name: "AttentionB_1/mask/up_sample/res1_1/branch1/conv1_1x1/bn" 2512 | type: "BN" 2513 | bottom: "AttentionB_1/mask/up_sample/res1_1/branch1/conv1_1x1" 2514 | top: "AttentionB_1/mask/up_sample/res1_1/branch1/conv1_1x1/bn" 2515 | bn_param { 2516 | frozen: true 2517 | } 2518 | } 2519 | 2520 | layer{ 2521 | name: "AttentionB_1/mask/up_sample/res1_1/branch1/conv1_1x1/bn/relu" 2522 | type: "ReLU" 2523 | bottom: "AttentionB_1/mask/up_sample/res1_1/branch1/conv1_1x1/bn" 2524 | top: "AttentionB_1/mask/up_sample/res1_1/branch1/conv1_1x1/bn" 2525 | } 2526 | 2527 | layer{ 2528 | name: "AttentionB_1/mask/up_sample/res1_1/branch1/conv2_3x3" 2529 | type: "Convolution" 2530 | bottom: "AttentionB_1/mask/up_sample/res1_1/branch1/conv1_1x1/bn" 2531 | top: "AttentionB_1/mask/up_sample/res1_1/branch1/conv2_3x3" 2532 | convolution_param { 2533 | num_output: 128 2534 | pad: 1 2535 | kernel_size: 3 2536 | stride: 1 2537 | bias_term: false 2538 | } 2539 | } 2540 | 2541 | layer{ 2542 | name: "AttentionB_1/mask/up_sample/res1_1/branch1/conv2_3x3/bn" 2543 | type: "BN" 2544 | bottom: "AttentionB_1/mask/up_sample/res1_1/branch1/conv2_3x3" 2545 | top: "AttentionB_1/mask/up_sample/res1_1/branch1/conv2_3x3/bn" 2546 | bn_param { 2547 | frozen: true 2548 | } 2549 | } 2550 | 2551 | layer{ 2552 | name: "AttentionB_1/mask/up_sample/res1_1/branch1/conv2_3x3/bn/relu" 2553 | type: "ReLU" 2554 | bottom: "AttentionB_1/mask/up_sample/res1_1/branch1/conv2_3x3/bn" 2555 | top: "AttentionB_1/mask/up_sample/res1_1/branch1/conv2_3x3/bn" 2556 | } 2557 | 2558 | layer{ 2559 | name: "AttentionB_1/mask/up_sample/res1_1/branch1/conv3_1x1" 2560 | type: "Convolution" 2561 | bottom: "AttentionB_1/mask/up_sample/res1_1/branch1/conv2_3x3/bn" 2562 | top: "AttentionB_1/mask/up_sample/res1_1/branch1/conv3_1x1" 2563 | convolution_param { 2564 | num_output: 512 2565 | pad: 0 2566 | kernel_size: 1 2567 | stride: 1 2568 | bias_term: false 2569 | } 2570 | } 2571 | 2572 | layer{ 2573 | name: "AttentionB_1/mask/up_sample/res1_1" 2574 | type: "Eltwise" 2575 | bottom: "AttentionB_1/mask/up_sample/res1_1/branch1/conv3_1x1" 2576 | bottom: "AttentionB_1/mask/up_sample1" 2577 | top: "AttentionB_1/mask/up_sample/res1_1" 2578 | 
eltwise_param { 2579 | operation: SUM 2580 | } 2581 | } 2582 | 2583 | layer{ 2584 | name: "AttentionB_1/mask/up_sample/interp_1" 2585 | type: "Interp" 2586 | bottom: "AttentionB_1/mask/up_sample/res1_1" 2587 | bottom: "AttentionB_1/trunk/res3" 2588 | top: "AttentionB_1/mask/up_sample/interp_1" 2589 | } 2590 | 2591 | layer{ 2592 | name: "AttentionB_1/mask/up_sample/interp_1/bn" 2593 | type: "BN" 2594 | bottom: "AttentionB_1/mask/up_sample/interp_1" 2595 | top: "AttentionB_1/mask/up_sample/interp_1/bn" 2596 | bn_param { 2597 | frozen: true 2598 | } 2599 | } 2600 | 2601 | layer{ 2602 | name: "AttentionB_1/mask/up_sample/interp_1/bn/relu" 2603 | type: "ReLU" 2604 | bottom: "AttentionB_1/mask/up_sample/interp_1/bn" 2605 | top: "AttentionB_1/mask/up_sample/interp_1/bn" 2606 | } 2607 | 2608 | layer{ 2609 | name: "AttentionB_1/mask/linear_1" 2610 | type: "Convolution" 2611 | bottom: "AttentionB_1/mask/up_sample/interp_1/bn" 2612 | top: "AttentionB_1/mask/linear_1" 2613 | convolution_param { 2614 | num_output: 512 2615 | pad: 0 2616 | kernel_size: 1 2617 | stride: 1 2618 | bias_term: false 2619 | } 2620 | } 2621 | 2622 | layer{ 2623 | name: "AttentionB_1/mask/linear_1/bn" 2624 | type: "BN" 2625 | bottom: "AttentionB_1/mask/linear_1" 2626 | top: "AttentionB_1/mask/linear_1/bn" 2627 | bn_param { 2628 | frozen: true 2629 | } 2630 | } 2631 | 2632 | layer{ 2633 | name: "AttentionB_1/mask/linear_1/bn/relu" 2634 | type: "ReLU" 2635 | bottom: "AttentionB_1/mask/linear_1/bn" 2636 | top: "AttentionB_1/mask/linear_1/bn" 2637 | } 2638 | 2639 | layer{ 2640 | name: "AttentionB_1/mask/linear_2" 2641 | type: "Convolution" 2642 | bottom: "AttentionB_1/mask/linear_1/bn" 2643 | top: "AttentionB_1/mask/linear_2" 2644 | convolution_param { 2645 | num_output: 512 2646 | pad: 0 2647 | kernel_size: 1 2648 | stride: 1 2649 | bias_term: false 2650 | } 2651 | } 2652 | 2653 | layer{ 2654 | name: "AttentionB_1/mask" 2655 | type: "Sigmoid" 2656 | bottom: "AttentionB_1/mask/linear_2" 2657 | top: "AttentionB_1/mask" 2658 | } 2659 | 2660 | layer{ 2661 | name: "AttentionB_1_residual" 2662 | type: "Eltwise" 2663 | bottom: "AttentionB_1/trunk/res3" 2664 | bottom: "AttentionB_1/mask" 2665 | top: "AttentionB_1_residual" 2666 | eltwise_param { 2667 | operation: PROD 2668 | } 2669 | } 2670 | 2671 | layer{ 2672 | name: "AttentionB_1/fusion" 2673 | type: "Eltwise" 2674 | bottom: "AttentionB_1_residual" 2675 | bottom: "AttentionB_1/trunk/res3" 2676 | top: "AttentionB_1/fusion" 2677 | eltwise_param { 2678 | operation: SUM 2679 | } 2680 | } 2681 | 2682 | layer{ 2683 | name: "AttentionB_1/fusion/bn" 2684 | type: "BN" 2685 | bottom: "AttentionB_1/fusion" 2686 | top: "AttentionB_1/fusion/bn" 2687 | bn_param { 2688 | frozen: true 2689 | } 2690 | } 2691 | 2692 | layer{ 2693 | name: "AttentionB_1/fusion/bn/relu" 2694 | type: "ReLU" 2695 | bottom: "AttentionB_1/fusion/bn" 2696 | top: "AttentionB_1/fusion/bn" 2697 | } 2698 | 2699 | layer{ 2700 | name: "AttentionB_1/branch1/conv1_1x1" 2701 | type: "Convolution" 2702 | bottom: "AttentionB_1/fusion/bn" 2703 | top: "AttentionB_1/branch1/conv1_1x1" 2704 | convolution_param { 2705 | num_output: 128 2706 | pad: 0 2707 | kernel_size: 1 2708 | stride: 1 2709 | bias_term: false 2710 | } 2711 | } 2712 | 2713 | layer{ 2714 | name: "AttentionB_1/branch1/conv1_1x1/bn" 2715 | type: "BN" 2716 | bottom: "AttentionB_1/branch1/conv1_1x1" 2717 | top: "AttentionB_1/branch1/conv1_1x1/bn" 2718 | bn_param { 2719 | frozen: true 2720 | } 2721 | } 2722 | 2723 | layer{ 2724 | name: "AttentionB_1/branch1/conv1_1x1/bn/relu" 2725 
| type: "ReLU" 2726 | bottom: "AttentionB_1/branch1/conv1_1x1/bn" 2727 | top: "AttentionB_1/branch1/conv1_1x1/bn" 2728 | } 2729 | 2730 | layer{ 2731 | name: "AttentionB_1/branch1/conv2_3x3" 2732 | type: "Convolution" 2733 | bottom: "AttentionB_1/branch1/conv1_1x1/bn" 2734 | top: "AttentionB_1/branch1/conv2_3x3" 2735 | convolution_param { 2736 | num_output: 128 2737 | pad: 1 2738 | kernel_size: 3 2739 | stride: 1 2740 | bias_term: false 2741 | } 2742 | } 2743 | 2744 | layer{ 2745 | name: "AttentionB_1/branch1/conv2_3x3/bn" 2746 | type: "BN" 2747 | bottom: "AttentionB_1/branch1/conv2_3x3" 2748 | top: "AttentionB_1/branch1/conv2_3x3/bn" 2749 | bn_param { 2750 | frozen: true 2751 | } 2752 | } 2753 | 2754 | layer{ 2755 | name: "AttentionB_1/branch1/conv2_3x3/bn/relu" 2756 | type: "ReLU" 2757 | bottom: "AttentionB_1/branch1/conv2_3x3/bn" 2758 | top: "AttentionB_1/branch1/conv2_3x3/bn" 2759 | } 2760 | 2761 | layer{ 2762 | name: "AttentionB_1/branch1/conv3_1x1" 2763 | type: "Convolution" 2764 | bottom: "AttentionB_1/branch1/conv2_3x3/bn" 2765 | top: "AttentionB_1/branch1/conv3_1x1" 2766 | convolution_param { 2767 | num_output: 512 2768 | pad: 0 2769 | kernel_size: 1 2770 | stride: 1 2771 | bias_term: false 2772 | } 2773 | } 2774 | 2775 | layer{ 2776 | name: "AttentionB_1" 2777 | type: "Eltwise" 2778 | bottom: "AttentionB_1/branch1/conv3_1x1" 2779 | bottom: "AttentionB_1/fusion" 2780 | top: "AttentionB_1" 2781 | eltwise_param { 2782 | operation: SUM 2783 | } 2784 | } 2785 | 2786 | layer{ 2787 | name: "AttentionB_1/bn" 2788 | type: "BN" 2789 | bottom: "AttentionB_1" 2790 | top: "AttentionB_1/bn" 2791 | bn_param { 2792 | frozen: true 2793 | } 2794 | } 2795 | 2796 | layer{ 2797 | name: "AttentionB_1/bn/relu" 2798 | type: "ReLU" 2799 | bottom: "AttentionB_1/bn" 2800 | top: "AttentionB_1/bn" 2801 | } 2802 | 2803 | layer{ 2804 | name: "pre_res_3/branch1/conv1_1x1" 2805 | type: "Convolution" 2806 | bottom: "AttentionB_1/bn" 2807 | top: "pre_res_3/branch1/conv1_1x1" 2808 | convolution_param { 2809 | num_output: 256 2810 | pad: 0 2811 | kernel_size: 1 2812 | stride: 1 2813 | bias_term: false 2814 | } 2815 | } 2816 | 2817 | layer{ 2818 | name: "pre_res_3/branch1/conv1_1x1/bn" 2819 | type: "BN" 2820 | bottom: "pre_res_3/branch1/conv1_1x1" 2821 | top: "pre_res_3/branch1/conv1_1x1/bn" 2822 | bn_param { 2823 | frozen: true 2824 | } 2825 | } 2826 | 2827 | layer{ 2828 | name: "pre_res_3/branch1/conv1_1x1/bn/relu" 2829 | type: "ReLU" 2830 | bottom: "pre_res_3/branch1/conv1_1x1/bn" 2831 | top: "pre_res_3/branch1/conv1_1x1/bn" 2832 | } 2833 | 2834 | layer{ 2835 | name: "pre_res_3/branch1/conv2_3x3" 2836 | type: "Convolution" 2837 | bottom: "pre_res_3/branch1/conv1_1x1/bn" 2838 | top: "pre_res_3/branch1/conv2_3x3" 2839 | convolution_param { 2840 | num_output: 256 2841 | pad: 1 2842 | kernel_size: 3 2843 | stride: 2 2844 | bias_term: false 2845 | } 2846 | } 2847 | 2848 | layer{ 2849 | name: "pre_res_3/branch1/conv2_3x3/bn" 2850 | type: "BN" 2851 | bottom: "pre_res_3/branch1/conv2_3x3" 2852 | top: "pre_res_3/branch1/conv2_3x3/bn" 2853 | bn_param { 2854 | frozen: true 2855 | } 2856 | } 2857 | 2858 | layer{ 2859 | name: "pre_res_3/branch1/conv2_3x3/bn/relu" 2860 | type: "ReLU" 2861 | bottom: "pre_res_3/branch1/conv2_3x3/bn" 2862 | top: "pre_res_3/branch1/conv2_3x3/bn" 2863 | } 2864 | 2865 | layer{ 2866 | name: "pre_res_3/branch1/conv3_1x1" 2867 | type: "Convolution" 2868 | bottom: "pre_res_3/branch1/conv2_3x3/bn" 2869 | top: "pre_res_3/branch1/conv3_1x1" 2870 | convolution_param { 2871 | num_output: 1024 2872 | pad: 0 
2873 | kernel_size: 1 2874 | stride: 1 2875 | bias_term: false 2876 | } 2877 | } 2878 | 2879 | layer{ 2880 | name: "pre_res_3/branch2/conv1_1x1" 2881 | type: "Convolution" 2882 | bottom: "AttentionB_1/bn" 2883 | top: "pre_res_3/branch2/conv1_1x1" 2884 | convolution_param { 2885 | num_output: 1024 2886 | pad: 0 2887 | kernel_size: 1 2888 | stride: 2 2889 | bias_term: false 2890 | } 2891 | } 2892 | 2893 | layer{ 2894 | name: "pre_res_3" 2895 | type: "Eltwise" 2896 | bottom: "pre_res_3/branch2/conv1_1x1" 2897 | bottom: "pre_res_3/branch1/conv3_1x1" 2898 | top: "pre_res_3" 2899 | eltwise_param { 2900 | operation: SUM 2901 | } 2902 | } 2903 | 2904 | layer{ 2905 | name: "pre_res_3/bn" 2906 | type: "BN" 2907 | bottom: "pre_res_3" 2908 | top: "pre_res_3/bn" 2909 | bn_param { 2910 | frozen: true 2911 | } 2912 | } 2913 | 2914 | layer{ 2915 | name: "pre_res_3/bn/relu" 2916 | type: "ReLU" 2917 | bottom: "pre_res_3/bn" 2918 | top: "pre_res_3/bn" 2919 | } 2920 | 2921 | layer{ 2922 | name: "AttentionC_1/trunk/res1/branch1/conv1_1x1" 2923 | type: "Convolution" 2924 | bottom: "pre_res_3/bn" 2925 | top: "AttentionC_1/trunk/res1/branch1/conv1_1x1" 2926 | convolution_param { 2927 | num_output: 256 2928 | pad: 0 2929 | kernel_size: 1 2930 | stride: 1 2931 | bias_term: false 2932 | } 2933 | } 2934 | 2935 | layer{ 2936 | name: "AttentionC_1/trunk/res1/branch1/conv1_1x1/bn" 2937 | type: "BN" 2938 | bottom: "AttentionC_1/trunk/res1/branch1/conv1_1x1" 2939 | top: "AttentionC_1/trunk/res1/branch1/conv1_1x1/bn" 2940 | bn_param { 2941 | frozen: true 2942 | } 2943 | } 2944 | 2945 | layer{ 2946 | name: "AttentionC_1/trunk/res1/branch1/conv1_1x1/bn/relu" 2947 | type: "ReLU" 2948 | bottom: "AttentionC_1/trunk/res1/branch1/conv1_1x1/bn" 2949 | top: "AttentionC_1/trunk/res1/branch1/conv1_1x1/bn" 2950 | } 2951 | 2952 | layer{ 2953 | name: "AttentionC_1/trunk/res1/branch1/conv2_3x3" 2954 | type: "Convolution" 2955 | bottom: "AttentionC_1/trunk/res1/branch1/conv1_1x1/bn" 2956 | top: "AttentionC_1/trunk/res1/branch1/conv2_3x3" 2957 | convolution_param { 2958 | num_output: 256 2959 | pad: 1 2960 | kernel_size: 3 2961 | stride: 1 2962 | bias_term: false 2963 | } 2964 | } 2965 | 2966 | layer{ 2967 | name: "AttentionC_1/trunk/res1/branch1/conv2_3x3/bn" 2968 | type: "BN" 2969 | bottom: "AttentionC_1/trunk/res1/branch1/conv2_3x3" 2970 | top: "AttentionC_1/trunk/res1/branch1/conv2_3x3/bn" 2971 | bn_param { 2972 | frozen: true 2973 | } 2974 | } 2975 | 2976 | layer{ 2977 | name: "AttentionC_1/trunk/res1/branch1/conv2_3x3/bn/relu" 2978 | type: "ReLU" 2979 | bottom: "AttentionC_1/trunk/res1/branch1/conv2_3x3/bn" 2980 | top: "AttentionC_1/trunk/res1/branch1/conv2_3x3/bn" 2981 | } 2982 | 2983 | layer{ 2984 | name: "AttentionC_1/trunk/res1/branch1/conv3_1x1" 2985 | type: "Convolution" 2986 | bottom: "AttentionC_1/trunk/res1/branch1/conv2_3x3/bn" 2987 | top: "AttentionC_1/trunk/res1/branch1/conv3_1x1" 2988 | convolution_param { 2989 | num_output: 1024 2990 | pad: 0 2991 | kernel_size: 1 2992 | stride: 1 2993 | bias_term: false 2994 | } 2995 | } 2996 | 2997 | layer{ 2998 | name: "AttentionC_1/trunk/res1" 2999 | type: "Eltwise" 3000 | bottom: "AttentionC_1/trunk/res1/branch1/conv3_1x1" 3001 | bottom: "pre_res_3" 3002 | top: "AttentionC_1/trunk/res1" 3003 | eltwise_param { 3004 | operation: SUM 3005 | } 3006 | } 3007 | 3008 | layer{ 3009 | name: "AttentionC_1/trunk/res1/bn" 3010 | type: "BN" 3011 | bottom: "AttentionC_1/trunk/res1" 3012 | top: "AttentionC_1/trunk/res1/bn" 3013 | bn_param { 3014 | frozen: true 3015 | } 3016 | } 3017 | 3018 | 
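# Annotation (added for clarity; not in the released prototxt): AttentionC_1 repeats the trunk/mask scheme of the earlier attention modules on 1024-channel 14x14 features, so its mask branch uses only a single down/up-sampling level; the PROD + SUM Eltwise pair further below realizes the paper's attention residual learning, output = (1 + mask) * trunk.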
layer{ 3019 | name: "AttentionC_1/trunk/res1/bn/relu" 3020 | type: "ReLU" 3021 | bottom: "AttentionC_1/trunk/res1/bn" 3022 | top: "AttentionC_1/trunk/res1/bn" 3023 | } 3024 | 3025 | layer{ 3026 | name: "AttentionC_1/trunk/res2/branch1/conv1_1x1" 3027 | type: "Convolution" 3028 | bottom: "AttentionC_1/trunk/res1/bn" 3029 | top: "AttentionC_1/trunk/res2/branch1/conv1_1x1" 3030 | convolution_param { 3031 | num_output: 256 3032 | pad: 0 3033 | kernel_size: 1 3034 | stride: 1 3035 | bias_term: false 3036 | } 3037 | } 3038 | 3039 | layer{ 3040 | name: "AttentionC_1/trunk/res2/branch1/conv1_1x1/bn" 3041 | type: "BN" 3042 | bottom: "AttentionC_1/trunk/res2/branch1/conv1_1x1" 3043 | top: "AttentionC_1/trunk/res2/branch1/conv1_1x1/bn" 3044 | bn_param { 3045 | frozen: true 3046 | } 3047 | } 3048 | 3049 | layer{ 3050 | name: "AttentionC_1/trunk/res2/branch1/conv1_1x1/bn/relu" 3051 | type: "ReLU" 3052 | bottom: "AttentionC_1/trunk/res2/branch1/conv1_1x1/bn" 3053 | top: "AttentionC_1/trunk/res2/branch1/conv1_1x1/bn" 3054 | } 3055 | 3056 | layer{ 3057 | name: "AttentionC_1/trunk/res2/branch1/conv2_3x3" 3058 | type: "Convolution" 3059 | bottom: "AttentionC_1/trunk/res2/branch1/conv1_1x1/bn" 3060 | top: "AttentionC_1/trunk/res2/branch1/conv2_3x3" 3061 | convolution_param { 3062 | num_output: 256 3063 | pad: 1 3064 | kernel_size: 3 3065 | stride: 1 3066 | bias_term: false 3067 | } 3068 | } 3069 | 3070 | layer{ 3071 | name: "AttentionC_1/trunk/res2/branch1/conv2_3x3/bn" 3072 | type: "BN" 3073 | bottom: "AttentionC_1/trunk/res2/branch1/conv2_3x3" 3074 | top: "AttentionC_1/trunk/res2/branch1/conv2_3x3/bn" 3075 | bn_param { 3076 | frozen: true 3077 | } 3078 | } 3079 | 3080 | layer{ 3081 | name: "AttentionC_1/trunk/res2/branch1/conv2_3x3/bn/relu" 3082 | type: "ReLU" 3083 | bottom: "AttentionC_1/trunk/res2/branch1/conv2_3x3/bn" 3084 | top: "AttentionC_1/trunk/res2/branch1/conv2_3x3/bn" 3085 | } 3086 | 3087 | layer{ 3088 | name: "AttentionC_1/trunk/res2/branch1/conv3_1x1" 3089 | type: "Convolution" 3090 | bottom: "AttentionC_1/trunk/res2/branch1/conv2_3x3/bn" 3091 | top: "AttentionC_1/trunk/res2/branch1/conv3_1x1" 3092 | convolution_param { 3093 | num_output: 1024 3094 | pad: 0 3095 | kernel_size: 1 3096 | stride: 1 3097 | bias_term: false 3098 | } 3099 | } 3100 | 3101 | layer{ 3102 | name: "AttentionC_1/trunk/res2" 3103 | type: "Eltwise" 3104 | bottom: "AttentionC_1/trunk/res2/branch1/conv3_1x1" 3105 | bottom: "AttentionC_1/trunk/res1" 3106 | top: "AttentionC_1/trunk/res2" 3107 | eltwise_param { 3108 | operation: SUM 3109 | } 3110 | } 3111 | 3112 | layer{ 3113 | name: "AttentionC_1/trunk/res2/bn" 3114 | type: "BN" 3115 | bottom: "AttentionC_1/trunk/res2" 3116 | top: "AttentionC_1/trunk/res2/bn" 3117 | bn_param { 3118 | frozen: true 3119 | } 3120 | } 3121 | 3122 | layer{ 3123 | name: "AttentionC_1/trunk/res2/bn/relu" 3124 | type: "ReLU" 3125 | bottom: "AttentionC_1/trunk/res2/bn" 3126 | top: "AttentionC_1/trunk/res2/bn" 3127 | } 3128 | 3129 | layer{ 3130 | name: "AttentionC_1/trunk/res3/branch1/conv1_1x1" 3131 | type: "Convolution" 3132 | bottom: "AttentionC_1/trunk/res2/bn" 3133 | top: "AttentionC_1/trunk/res3/branch1/conv1_1x1" 3134 | convolution_param { 3135 | num_output: 256 3136 | pad: 0 3137 | kernel_size: 1 3138 | stride: 1 3139 | bias_term: false 3140 | } 3141 | } 3142 | 3143 | layer{ 3144 | name: "AttentionC_1/trunk/res3/branch1/conv1_1x1/bn" 3145 | type: "BN" 3146 | bottom: "AttentionC_1/trunk/res3/branch1/conv1_1x1" 3147 | top: "AttentionC_1/trunk/res3/branch1/conv1_1x1/bn" 3148 | bn_param { 3149 | 
frozen: true 3150 | } 3151 | } 3152 | 3153 | layer{ 3154 | name: "AttentionC_1/trunk/res3/branch1/conv1_1x1/bn/relu" 3155 | type: "ReLU" 3156 | bottom: "AttentionC_1/trunk/res3/branch1/conv1_1x1/bn" 3157 | top: "AttentionC_1/trunk/res3/branch1/conv1_1x1/bn" 3158 | } 3159 | 3160 | layer{ 3161 | name: "AttentionC_1/trunk/res3/branch1/conv2_3x3" 3162 | type: "Convolution" 3163 | bottom: "AttentionC_1/trunk/res3/branch1/conv1_1x1/bn" 3164 | top: "AttentionC_1/trunk/res3/branch1/conv2_3x3" 3165 | convolution_param { 3166 | num_output: 256 3167 | pad: 1 3168 | kernel_size: 3 3169 | stride: 1 3170 | bias_term: false 3171 | } 3172 | } 3173 | 3174 | layer{ 3175 | name: "AttentionC_1/trunk/res3/branch1/conv2_3x3/bn" 3176 | type: "BN" 3177 | bottom: "AttentionC_1/trunk/res3/branch1/conv2_3x3" 3178 | top: "AttentionC_1/trunk/res3/branch1/conv2_3x3/bn" 3179 | bn_param { 3180 | frozen: true 3181 | } 3182 | } 3183 | 3184 | layer{ 3185 | name: "AttentionC_1/trunk/res3/branch1/conv2_3x3/bn/relu" 3186 | type: "ReLU" 3187 | bottom: "AttentionC_1/trunk/res3/branch1/conv2_3x3/bn" 3188 | top: "AttentionC_1/trunk/res3/branch1/conv2_3x3/bn" 3189 | } 3190 | 3191 | layer{ 3192 | name: "AttentionC_1/trunk/res3/branch1/conv3_1x1" 3193 | type: "Convolution" 3194 | bottom: "AttentionC_1/trunk/res3/branch1/conv2_3x3/bn" 3195 | top: "AttentionC_1/trunk/res3/branch1/conv3_1x1" 3196 | convolution_param { 3197 | num_output: 1024 3198 | pad: 0 3199 | kernel_size: 1 3200 | stride: 1 3201 | bias_term: false 3202 | } 3203 | } 3204 | 3205 | layer{ 3206 | name: "AttentionC_1/trunk/res3" 3207 | type: "Eltwise" 3208 | bottom: "AttentionC_1/trunk/res3/branch1/conv3_1x1" 3209 | bottom: "AttentionC_1/trunk/res2" 3210 | top: "AttentionC_1/trunk/res3" 3211 | eltwise_param { 3212 | operation: SUM 3213 | } 3214 | } 3215 | 3216 | layer{ 3217 | name: "AttentionC_1/mask/down_sample/pool1_3x3_s2" 3218 | type: "Pooling" 3219 | bottom: "AttentionC_1/trunk/res1" 3220 | top: "AttentionC_1/mask/down_sample/pool1_3x3_s2" 3221 | pooling_param { 3222 | pool: MAX 3223 | kernel_size: 3 3224 | stride: 2 3225 | } 3226 | } 3227 | 3228 | layer{ 3229 | name: "AttentionC_1/mask/down_sample/pool1_3x3_s2/bn" 3230 | type: "BN" 3231 | bottom: "AttentionC_1/mask/down_sample/pool1_3x3_s2" 3232 | top: "AttentionC_1/mask/down_sample/pool1_3x3_s2/bn" 3233 | bn_param { 3234 | frozen: true 3235 | } 3236 | } 3237 | 3238 | layer{ 3239 | name: "AttentionC_1/mask/down_sample/pool1_3x3_s2/bn/relu" 3240 | type: "ReLU" 3241 | bottom: "AttentionC_1/mask/down_sample/pool1_3x3_s2/bn" 3242 | top: "AttentionC_1/mask/down_sample/pool1_3x3_s2/bn" 3243 | } 3244 | 3245 | layer{ 3246 | name: "AttentionC_1/mask/down_sample/res1_1/branch1/conv1_1x1" 3247 | type: "Convolution" 3248 | bottom: "AttentionC_1/mask/down_sample/pool1_3x3_s2/bn" 3249 | top: "AttentionC_1/mask/down_sample/res1_1/branch1/conv1_1x1" 3250 | convolution_param { 3251 | num_output: 256 3252 | pad: 0 3253 | kernel_size: 1 3254 | stride: 1 3255 | bias_term: false 3256 | } 3257 | } 3258 | 3259 | layer{ 3260 | name: "AttentionC_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 3261 | type: "BN" 3262 | bottom: "AttentionC_1/mask/down_sample/res1_1/branch1/conv1_1x1" 3263 | top: "AttentionC_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 3264 | bn_param { 3265 | frozen: true 3266 | } 3267 | } 3268 | 3269 | layer{ 3270 | name: "AttentionC_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn/relu" 3271 | type: "ReLU" 3272 | bottom: "AttentionC_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 3273 | top: 
"AttentionC_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 3274 | } 3275 | 3276 | layer{ 3277 | name: "AttentionC_1/mask/down_sample/res1_1/branch1/conv2_3x3" 3278 | type: "Convolution" 3279 | bottom: "AttentionC_1/mask/down_sample/res1_1/branch1/conv1_1x1/bn" 3280 | top: "AttentionC_1/mask/down_sample/res1_1/branch1/conv2_3x3" 3281 | convolution_param { 3282 | num_output: 256 3283 | pad: 1 3284 | kernel_size: 3 3285 | stride: 1 3286 | bias_term: false 3287 | } 3288 | } 3289 | 3290 | layer{ 3291 | name: "AttentionC_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 3292 | type: "BN" 3293 | bottom: "AttentionC_1/mask/down_sample/res1_1/branch1/conv2_3x3" 3294 | top: "AttentionC_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 3295 | bn_param { 3296 | frozen: true 3297 | } 3298 | } 3299 | 3300 | layer{ 3301 | name: "AttentionC_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn/relu" 3302 | type: "ReLU" 3303 | bottom: "AttentionC_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 3304 | top: "AttentionC_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 3305 | } 3306 | 3307 | layer{ 3308 | name: "AttentionC_1/mask/down_sample/res1_1/branch1/conv3_1x1" 3309 | type: "Convolution" 3310 | bottom: "AttentionC_1/mask/down_sample/res1_1/branch1/conv2_3x3/bn" 3311 | top: "AttentionC_1/mask/down_sample/res1_1/branch1/conv3_1x1" 3312 | convolution_param { 3313 | num_output: 1024 3314 | pad: 0 3315 | kernel_size: 1 3316 | stride: 1 3317 | bias_term: false 3318 | } 3319 | } 3320 | 3321 | layer{ 3322 | name: "AttentionC_1/mask/down_sample/res1_1" 3323 | type: "Eltwise" 3324 | bottom: "AttentionC_1/mask/down_sample/res1_1/branch1/conv3_1x1" 3325 | bottom: "AttentionC_1/mask/down_sample/pool1_3x3_s2" 3326 | top: "AttentionC_1/mask/down_sample/res1_1" 3327 | eltwise_param { 3328 | operation: SUM 3329 | } 3330 | } 3331 | 3332 | layer{ 3333 | name: "AttentionC_1/mask/down_sample/res1_1/bn" 3334 | type: "BN" 3335 | bottom: "AttentionC_1/mask/down_sample/res1_1" 3336 | top: "AttentionC_1/mask/down_sample/res1_1/bn" 3337 | bn_param { 3338 | frozen: true 3339 | } 3340 | } 3341 | 3342 | layer{ 3343 | name: "AttentionC_1/mask/down_sample/res1_1/bn/relu" 3344 | type: "ReLU" 3345 | bottom: "AttentionC_1/mask/down_sample/res1_1/bn" 3346 | top: "AttentionC_1/mask/down_sample/res1_1/bn" 3347 | } 3348 | 3349 | layer{ 3350 | name: "AttentionC_1/mask/down_sample/res1_2/branch1/conv1_1x1" 3351 | type: "Convolution" 3352 | bottom: "AttentionC_1/mask/down_sample/res1_1/bn" 3353 | top: "AttentionC_1/mask/down_sample/res1_2/branch1/conv1_1x1" 3354 | convolution_param { 3355 | num_output: 256 3356 | pad: 0 3357 | kernel_size: 1 3358 | stride: 1 3359 | bias_term: false 3360 | } 3361 | } 3362 | 3363 | layer{ 3364 | name: "AttentionC_1/mask/down_sample/res1_2/branch1/conv1_1x1/bn" 3365 | type: "BN" 3366 | bottom: "AttentionC_1/mask/down_sample/res1_2/branch1/conv1_1x1" 3367 | top: "AttentionC_1/mask/down_sample/res1_2/branch1/conv1_1x1/bn" 3368 | bn_param { 3369 | frozen: true 3370 | } 3371 | } 3372 | 3373 | layer{ 3374 | name: "AttentionC_1/mask/down_sample/res1_2/branch1/conv1_1x1/bn/relu" 3375 | type: "ReLU" 3376 | bottom: "AttentionC_1/mask/down_sample/res1_2/branch1/conv1_1x1/bn" 3377 | top: "AttentionC_1/mask/down_sample/res1_2/branch1/conv1_1x1/bn" 3378 | } 3379 | 3380 | layer{ 3381 | name: "AttentionC_1/mask/down_sample/res1_2/branch1/conv2_3x3" 3382 | type: "Convolution" 3383 | bottom: "AttentionC_1/mask/down_sample/res1_2/branch1/conv1_1x1/bn" 3384 | top: "AttentionC_1/mask/down_sample/res1_2/branch1/conv2_3x3" 3385 | convolution_param 
{ 3386 | num_output: 256 3387 | pad: 1 3388 | kernel_size: 3 3389 | stride: 1 3390 | bias_term: false 3391 | } 3392 | } 3393 | 3394 | layer{ 3395 | name: "AttentionC_1/mask/down_sample/res1_2/branch1/conv2_3x3/bn" 3396 | type: "BN" 3397 | bottom: "AttentionC_1/mask/down_sample/res1_2/branch1/conv2_3x3" 3398 | top: "AttentionC_1/mask/down_sample/res1_2/branch1/conv2_3x3/bn" 3399 | bn_param { 3400 | frozen: true 3401 | } 3402 | } 3403 | 3404 | layer{ 3405 | name: "AttentionC_1/mask/down_sample/res1_2/branch1/conv2_3x3/bn/relu" 3406 | type: "ReLU" 3407 | bottom: "AttentionC_1/mask/down_sample/res1_2/branch1/conv2_3x3/bn" 3408 | top: "AttentionC_1/mask/down_sample/res1_2/branch1/conv2_3x3/bn" 3409 | } 3410 | 3411 | layer{ 3412 | name: "AttentionC_1/mask/down_sample/res1_2/branch1/conv3_1x1" 3413 | type: "Convolution" 3414 | bottom: "AttentionC_1/mask/down_sample/res1_2/branch1/conv2_3x3/bn" 3415 | top: "AttentionC_1/mask/down_sample/res1_2/branch1/conv3_1x1" 3416 | convolution_param { 3417 | num_output: 1024 3418 | pad: 0 3419 | kernel_size: 1 3420 | stride: 1 3421 | bias_term: false 3422 | } 3423 | } 3424 | 3425 | layer{ 3426 | name: "AttentionC_1/mask/down_sample/res1_2" 3427 | type: "Eltwise" 3428 | bottom: "AttentionC_1/mask/down_sample/res1_2/branch1/conv3_1x1" 3429 | bottom: "AttentionC_1/mask/down_sample/res1_1" 3430 | top: "AttentionC_1/mask/down_sample/res1_2" 3431 | eltwise_param { 3432 | operation: SUM 3433 | } 3434 | } 3435 | 3436 | layer{ 3437 | name: "AttentionC_1/mask/up_sample/interp_1" 3438 | type: "Interp" 3439 | bottom: "AttentionC_1/mask/down_sample/res1_2" 3440 | bottom: "AttentionC_1/trunk/res3" 3441 | top: "AttentionC_1/mask/up_sample/interp_1" 3442 | } 3443 | 3444 | layer{ 3445 | name: "AttentionC_1/mask/up_sample/interp_1/bn" 3446 | type: "BN" 3447 | bottom: "AttentionC_1/mask/up_sample/interp_1" 3448 | top: "AttentionC_1/mask/up_sample/interp_1/bn" 3449 | bn_param { 3450 | frozen: true 3451 | } 3452 | } 3453 | 3454 | layer{ 3455 | name: "AttentionC_1/mask/up_sample/interp_1/bn/relu" 3456 | type: "ReLU" 3457 | bottom: "AttentionC_1/mask/up_sample/interp_1/bn" 3458 | top: "AttentionC_1/mask/up_sample/interp_1/bn" 3459 | } 3460 | 3461 | layer{ 3462 | name: "AttentionC_1/mask/linear_1" 3463 | type: "Convolution" 3464 | bottom: "AttentionC_1/mask/up_sample/interp_1/bn" 3465 | top: "AttentionC_1/mask/linear_1" 3466 | convolution_param { 3467 | num_output: 1024 3468 | pad: 0 3469 | kernel_size: 1 3470 | stride: 1 3471 | bias_term: false 3472 | } 3473 | } 3474 | 3475 | layer{ 3476 | name: "AttentionC_1/mask/linear_1/bn" 3477 | type: "BN" 3478 | bottom: "AttentionC_1/mask/linear_1" 3479 | top: "AttentionC_1/mask/linear_1/bn" 3480 | bn_param { 3481 | frozen: true 3482 | } 3483 | } 3484 | 3485 | layer{ 3486 | name: "AttentionC_1/mask/linear_1/bn/relu" 3487 | type: "ReLU" 3488 | bottom: "AttentionC_1/mask/linear_1/bn" 3489 | top: "AttentionC_1/mask/linear_1/bn" 3490 | } 3491 | 3492 | layer{ 3493 | name: "AttentionC_1/mask/linear_2" 3494 | type: "Convolution" 3495 | bottom: "AttentionC_1/mask/linear_1/bn" 3496 | top: "AttentionC_1/mask/linear_2" 3497 | convolution_param { 3498 | num_output: 1024 3499 | pad: 0 3500 | kernel_size: 1 3501 | stride: 1 3502 | bias_term: false 3503 | } 3504 | } 3505 | 3506 | layer{ 3507 | name: "AttentionC_1/mask" 3508 | type: "Sigmoid" 3509 | bottom: "AttentionC_1/mask/linear_2" 3510 | top: "AttentionC_1/mask" 3511 | } 3512 | 3513 | layer{ 3514 | name: "AttentionC_1_residual" 3515 | type: "Eltwise" 3516 | bottom: "AttentionC_1/trunk/res3" 3517 | 
bottom: "AttentionC_1/mask" 3518 | top: "AttentionC_1_residual" 3519 | eltwise_param { 3520 | operation: PROD 3521 | } 3522 | } 3523 | 3524 | layer{ 3525 | name: "AttentionC_1/fusion" 3526 | type: "Eltwise" 3527 | bottom: "AttentionC_1_residual" 3528 | bottom: "AttentionC_1/trunk/res3" 3529 | top: "AttentionC_1/fusion" 3530 | eltwise_param { 3531 | operation: SUM 3532 | } 3533 | } 3534 | 3535 | layer{ 3536 | name: "AttentionC_1/fusion/bn" 3537 | type: "BN" 3538 | bottom: "AttentionC_1/fusion" 3539 | top: "AttentionC_1/fusion/bn" 3540 | bn_param { 3541 | frozen: true 3542 | } 3543 | } 3544 | 3545 | layer{ 3546 | name: "AttentionC_1/fusion/bn/relu" 3547 | type: "ReLU" 3548 | bottom: "AttentionC_1/fusion/bn" 3549 | top: "AttentionC_1/fusion/bn" 3550 | } 3551 | 3552 | layer{ 3553 | name: "AttentionC_1/branch1/conv1_1x1" 3554 | type: "Convolution" 3555 | bottom: "AttentionC_1/fusion/bn" 3556 | top: "AttentionC_1/branch1/conv1_1x1" 3557 | convolution_param { 3558 | num_output: 256 3559 | pad: 0 3560 | kernel_size: 1 3561 | stride: 1 3562 | bias_term: false 3563 | } 3564 | } 3565 | 3566 | layer{ 3567 | name: "AttentionC_1/branch1/conv1_1x1/bn" 3568 | type: "BN" 3569 | bottom: "AttentionC_1/branch1/conv1_1x1" 3570 | top: "AttentionC_1/branch1/conv1_1x1/bn" 3571 | bn_param { 3572 | frozen: true 3573 | } 3574 | } 3575 | 3576 | layer{ 3577 | name: "AttentionC_1/branch1/conv1_1x1/bn/relu" 3578 | type: "ReLU" 3579 | bottom: "AttentionC_1/branch1/conv1_1x1/bn" 3580 | top: "AttentionC_1/branch1/conv1_1x1/bn" 3581 | } 3582 | 3583 | layer{ 3584 | name: "AttentionC_1/branch1/conv2_3x3" 3585 | type: "Convolution" 3586 | bottom: "AttentionC_1/branch1/conv1_1x1/bn" 3587 | top: "AttentionC_1/branch1/conv2_3x3" 3588 | convolution_param { 3589 | num_output: 256 3590 | pad: 1 3591 | kernel_size: 3 3592 | stride: 1 3593 | bias_term: false 3594 | } 3595 | } 3596 | 3597 | layer{ 3598 | name: "AttentionC_1/branch1/conv2_3x3/bn" 3599 | type: "BN" 3600 | bottom: "AttentionC_1/branch1/conv2_3x3" 3601 | top: "AttentionC_1/branch1/conv2_3x3/bn" 3602 | bn_param { 3603 | frozen: true 3604 | } 3605 | } 3606 | 3607 | layer{ 3608 | name: "AttentionC_1/branch1/conv2_3x3/bn/relu" 3609 | type: "ReLU" 3610 | bottom: "AttentionC_1/branch1/conv2_3x3/bn" 3611 | top: "AttentionC_1/branch1/conv2_3x3/bn" 3612 | } 3613 | 3614 | layer{ 3615 | name: "AttentionC_1/branch1/conv3_1x1" 3616 | type: "Convolution" 3617 | bottom: "AttentionC_1/branch1/conv2_3x3/bn" 3618 | top: "AttentionC_1/branch1/conv3_1x1" 3619 | convolution_param { 3620 | num_output: 1024 3621 | pad: 0 3622 | kernel_size: 1 3623 | stride: 1 3624 | bias_term: false 3625 | } 3626 | } 3627 | 3628 | layer{ 3629 | name: "AttentionC_1" 3630 | type: "Eltwise" 3631 | bottom: "AttentionC_1/branch1/conv3_1x1" 3632 | bottom: "AttentionC_1/fusion" 3633 | top: "AttentionC_1" 3634 | eltwise_param { 3635 | operation: SUM 3636 | } 3637 | } 3638 | 3639 | layer{ 3640 | name: "AttentionC_1/bn" 3641 | type: "BN" 3642 | bottom: "AttentionC_1" 3643 | top: "AttentionC_1/bn" 3644 | bn_param { 3645 | frozen: true 3646 | } 3647 | } 3648 | 3649 | layer{ 3650 | name: "AttentionC_1/bn/relu" 3651 | type: "ReLU" 3652 | bottom: "AttentionC_1/bn" 3653 | top: "AttentionC_1/bn" 3654 | } 3655 | 3656 | layer{ 3657 | name: "post_res_4_1/branch1/conv1_1x1" 3658 | type: "Convolution" 3659 | bottom: "AttentionC_1/bn" 3660 | top: "post_res_4_1/branch1/conv1_1x1" 3661 | convolution_param { 3662 | num_output: 512 3663 | pad: 0 3664 | kernel_size: 1 3665 | stride: 1 3666 | bias_term: false 3667 | } 3668 | } 3669 | 3670 | 
layer{ 3671 | name: "post_res_4_1/branch1/conv1_1x1/bn" 3672 | type: "BN" 3673 | bottom: "post_res_4_1/branch1/conv1_1x1" 3674 | top: "post_res_4_1/branch1/conv1_1x1/bn" 3675 | bn_param { 3676 | frozen: true 3677 | } 3678 | } 3679 | 3680 | layer{ 3681 | name: "post_res_4_1/branch1/conv1_1x1/bn/relu" 3682 | type: "ReLU" 3683 | bottom: "post_res_4_1/branch1/conv1_1x1/bn" 3684 | top: "post_res_4_1/branch1/conv1_1x1/bn" 3685 | } 3686 | 3687 | layer{ 3688 | name: "post_res_4_1/branch1/conv2_3x3" 3689 | type: "Convolution" 3690 | bottom: "post_res_4_1/branch1/conv1_1x1/bn" 3691 | top: "post_res_4_1/branch1/conv2_3x3" 3692 | convolution_param { 3693 | num_output: 512 3694 | pad: 1 3695 | kernel_size: 3 3696 | stride: 2 3697 | bias_term: false 3698 | } 3699 | } 3700 | 3701 | layer{ 3702 | name: "post_res_4_1/branch1/conv2_3x3/bn" 3703 | type: "BN" 3704 | bottom: "post_res_4_1/branch1/conv2_3x3" 3705 | top: "post_res_4_1/branch1/conv2_3x3/bn" 3706 | bn_param { 3707 | frozen: true 3708 | } 3709 | } 3710 | 3711 | layer{ 3712 | name: "post_res_4_1/branch1/conv2_3x3/bn/relu" 3713 | type: "ReLU" 3714 | bottom: "post_res_4_1/branch1/conv2_3x3/bn" 3715 | top: "post_res_4_1/branch1/conv2_3x3/bn" 3716 | } 3717 | 3718 | layer{ 3719 | name: "post_res_4_1/branch1/conv3_1x1" 3720 | type: "Convolution" 3721 | bottom: "post_res_4_1/branch1/conv2_3x3/bn" 3722 | top: "post_res_4_1/branch1/conv3_1x1" 3723 | convolution_param { 3724 | num_output: 2048 3725 | pad: 0 3726 | kernel_size: 1 3727 | stride: 1 3728 | bias_term: false 3729 | } 3730 | } 3731 | 3732 | layer{ 3733 | name: "post_res_4_1/branch2/conv1_1x1" 3734 | type: "Convolution" 3735 | bottom: "AttentionC_1/bn" 3736 | top: "post_res_4_1/branch2/conv1_1x1" 3737 | convolution_param { 3738 | num_output: 2048 3739 | pad: 0 3740 | kernel_size: 1 3741 | stride: 2 3742 | bias_term: false 3743 | } 3744 | } 3745 | 3746 | layer{ 3747 | name: "post_res_4_1" 3748 | type: "Eltwise" 3749 | bottom: "post_res_4_1/branch2/conv1_1x1" 3750 | bottom: "post_res_4_1/branch1/conv3_1x1" 3751 | top: "post_res_4_1" 3752 | eltwise_param { 3753 | operation: SUM 3754 | } 3755 | } 3756 | 3757 | layer{ 3758 | name: "post_res_4_1/bn" 3759 | type: "BN" 3760 | bottom: "post_res_4_1" 3761 | top: "post_res_4_1/bn" 3762 | bn_param { 3763 | frozen: true 3764 | } 3765 | } 3766 | 3767 | layer{ 3768 | name: "post_res_4_1/bn/relu" 3769 | type: "ReLU" 3770 | bottom: "post_res_4_1/bn" 3771 | top: "post_res_4_1/bn" 3772 | } 3773 | 3774 | layer{ 3775 | name: "post_res_4_2/branch1/conv1_1x1" 3776 | type: "Convolution" 3777 | bottom: "post_res_4_1/bn" 3778 | top: "post_res_4_2/branch1/conv1_1x1" 3779 | convolution_param { 3780 | num_output: 512 3781 | pad: 0 3782 | kernel_size: 1 3783 | stride: 1 3784 | bias_term: false 3785 | } 3786 | } 3787 | 3788 | layer{ 3789 | name: "post_res_4_2/branch1/conv1_1x1/bn" 3790 | type: "BN" 3791 | bottom: "post_res_4_2/branch1/conv1_1x1" 3792 | top: "post_res_4_2/branch1/conv1_1x1/bn" 3793 | bn_param { 3794 | frozen: true 3795 | } 3796 | } 3797 | 3798 | layer{ 3799 | name: "post_res_4_2/branch1/conv1_1x1/bn/relu" 3800 | type: "ReLU" 3801 | bottom: "post_res_4_2/branch1/conv1_1x1/bn" 3802 | top: "post_res_4_2/branch1/conv1_1x1/bn" 3803 | } 3804 | 3805 | layer{ 3806 | name: "post_res_4_2/branch1/conv2_3x3" 3807 | type: "Convolution" 3808 | bottom: "post_res_4_2/branch1/conv1_1x1/bn" 3809 | top: "post_res_4_2/branch1/conv2_3x3" 3810 | convolution_param { 3811 | num_output: 512 3812 | pad: 1 3813 | kernel_size: 3 3814 | stride: 1 3815 | bias_term: false 3816 | } 3817 | } 3818 
| 3819 | layer{ 3820 | name: "post_res_4_2/branch1/conv2_3x3/bn" 3821 | type: "BN" 3822 | bottom: "post_res_4_2/branch1/conv2_3x3" 3823 | top: "post_res_4_2/branch1/conv2_3x3/bn" 3824 | bn_param { 3825 | frozen: true 3826 | } 3827 | } 3828 | 3829 | layer{ 3830 | name: "post_res_4_2/branch1/conv2_3x3/bn/relu" 3831 | type: "ReLU" 3832 | bottom: "post_res_4_2/branch1/conv2_3x3/bn" 3833 | top: "post_res_4_2/branch1/conv2_3x3/bn" 3834 | } 3835 | 3836 | layer{ 3837 | name: "post_res_4_2/branch1/conv3_1x1" 3838 | type: "Convolution" 3839 | bottom: "post_res_4_2/branch1/conv2_3x3/bn" 3840 | top: "post_res_4_2/branch1/conv3_1x1" 3841 | convolution_param { 3842 | num_output: 2048 3843 | pad: 0 3844 | kernel_size: 1 3845 | stride: 1 3846 | bias_term: false 3847 | } 3848 | } 3849 | 3850 | layer{ 3851 | name: "post_res_4_2" 3852 | type: "Eltwise" 3853 | bottom: "post_res_4_2/branch1/conv3_1x1" 3854 | bottom: "post_res_4_1" 3855 | top: "post_res_4_2" 3856 | eltwise_param { 3857 | operation: SUM 3858 | } 3859 | } 3860 | 3861 | layer{ 3862 | name: "post_res_4_2/bn" 3863 | type: "BN" 3864 | bottom: "post_res_4_2" 3865 | top: "post_res_4_2/bn" 3866 | bn_param { 3867 | frozen: true 3868 | } 3869 | } 3870 | 3871 | layer{ 3872 | name: "post_res_4_2/bn/relu" 3873 | type: "ReLU" 3874 | bottom: "post_res_4_2/bn" 3875 | top: "post_res_4_2/bn" 3876 | } 3877 | 3878 | layer{ 3879 | name: "post_res_4_3/branch1/conv1_1x1" 3880 | type: "Convolution" 3881 | bottom: "post_res_4_2/bn" 3882 | top: "post_res_4_3/branch1/conv1_1x1" 3883 | convolution_param { 3884 | num_output: 512 3885 | pad: 0 3886 | kernel_size: 1 3887 | stride: 1 3888 | bias_term: false 3889 | } 3890 | } 3891 | 3892 | layer{ 3893 | name: "post_res_4_3/branch1/conv1_1x1/bn" 3894 | type: "BN" 3895 | bottom: "post_res_4_3/branch1/conv1_1x1" 3896 | top: "post_res_4_3/branch1/conv1_1x1/bn" 3897 | bn_param { 3898 | frozen: true 3899 | } 3900 | } 3901 | 3902 | layer{ 3903 | name: "post_res_4_3/branch1/conv1_1x1/bn/relu" 3904 | type: "ReLU" 3905 | bottom: "post_res_4_3/branch1/conv1_1x1/bn" 3906 | top: "post_res_4_3/branch1/conv1_1x1/bn" 3907 | } 3908 | 3909 | layer{ 3910 | name: "post_res_4_3/branch1/conv2_3x3" 3911 | type: "Convolution" 3912 | bottom: "post_res_4_3/branch1/conv1_1x1/bn" 3913 | top: "post_res_4_3/branch1/conv2_3x3" 3914 | convolution_param { 3915 | num_output: 512 3916 | pad: 1 3917 | kernel_size: 3 3918 | stride: 1 3919 | bias_term: false 3920 | } 3921 | } 3922 | 3923 | layer{ 3924 | name: "post_res_4_3/branch1/conv2_3x3/bn" 3925 | type: "BN" 3926 | bottom: "post_res_4_3/branch1/conv2_3x3" 3927 | top: "post_res_4_3/branch1/conv2_3x3/bn" 3928 | bn_param { 3929 | frozen: true 3930 | } 3931 | } 3932 | 3933 | layer{ 3934 | name: "post_res_4_3/branch1/conv2_3x3/bn/relu" 3935 | type: "ReLU" 3936 | bottom: "post_res_4_3/branch1/conv2_3x3/bn" 3937 | top: "post_res_4_3/branch1/conv2_3x3/bn" 3938 | } 3939 | 3940 | layer{ 3941 | name: "post_res_4_3/branch1/conv3_1x1" 3942 | type: "Convolution" 3943 | bottom: "post_res_4_3/branch1/conv2_3x3/bn" 3944 | top: "post_res_4_3/branch1/conv3_1x1" 3945 | convolution_param { 3946 | num_output: 2048 3947 | pad: 0 3948 | kernel_size: 1 3949 | stride: 1 3950 | bias_term: false 3951 | } 3952 | } 3953 | 3954 | layer{ 3955 | name: "post_res_4_3" 3956 | type: "Eltwise" 3957 | bottom: "post_res_4_3/branch1/conv3_1x1" 3958 | bottom: "post_res_4_2" 3959 | top: "post_res_4_3" 3960 | eltwise_param { 3961 | operation: SUM 3962 | } 3963 | } 3964 | 3965 | layer{ 3966 | name: "post_res_4_3/bn" 3967 | type: "BN" 3968 | bottom: 
"post_res_4_3" 3969 | top: "post_res_4_3/bn" 3970 | bn_param { 3971 | frozen: true 3972 | } 3973 | } 3974 | 3975 | layer{ 3976 | name: "post_res_4_3/bn/relu" 3977 | type: "ReLU" 3978 | bottom: "post_res_4_3/bn" 3979 | top: "post_res_4_3/bn" 3980 | } 3981 | 3982 | layer{ 3983 | name: "ave_pool" 3984 | type: "Pooling" 3985 | bottom: "post_res_4_3/bn" 3986 | top: "ave_pool" 3987 | pooling_param { 3988 | pool: AVE 3989 | kernel_size: 7 3990 | stride: 1 3991 | } 3992 | } 3993 | 3994 | layer{ 3995 | name: "classifier" 3996 | type: "InnerProduct" 3997 | bottom: "ave_pool" 3998 | top: "classifier" 3999 | inner_product_param { 4000 | num_output: 1000 4001 | } 4002 | } 4003 | 4004 | layer{ 4005 | name: "cls" 4006 | type: "Softmax" 4007 | bottom: "classifier" 4008 | top: "cls" 4009 | } 4010 | 4011 | -------------------------------------------------------------------------------- /imagenet_model/AttentionNeXt-56-deploy.prototxt: -------------------------------------------------------------------------------- 1 | name: "AttentionNext-56" 2 | input: "data" 3 | input_dim: 1 4 | input_dim: 3 5 | input_dim: 224 6 | input_dim: 224 7 | 8 | 9 | layer{ 10 | name: "conv1" 11 | type: "Convolution" 12 | bottom: "data" 13 | top: "conv1" 14 | param{ 15 | lr_mult: 1 16 | decay_mult: 1 17 | } 18 | convolution_param { 19 | num_output: 64 20 | pad: 3 21 | kernel_size: 7 22 | stride: 2 23 | bias_term: false 24 | } 25 | } 26 | 27 | layer{ 28 | name: "conv1/bn" 29 | type: "BN" 30 | bottom: "conv1" 31 | top: "conv1/bn" 32 | bn_param { 33 | frozen: true 34 | } 35 | } 36 | 37 | layer{ 38 | name: "conv1/bn/relu" 39 | type: "ReLU" 40 | bottom: "conv1/bn" 41 | top: "conv1/bn" 42 | } 43 | 44 | layer{ 45 | name: "pool1_3x3_s2" 46 | type: "Pooling" 47 | bottom: "conv1/bn" 48 | top: "pool1_3x3_s2" 49 | pooling_param { 50 | pool: MAX 51 | kernel_size: 3 52 | stride: 2 53 | } 54 | } 55 | 56 | layer{ 57 | name: "pre_res_1/branch1/conv1_1x1" 58 | type: "Convolution" 59 | bottom: "pool1_3x3_s2" 60 | top: "pre_res_1/branch1/conv1_1x1" 61 | param{ 62 | lr_mult: 1 63 | decay_mult: 1 64 | } 65 | convolution_param { 66 | num_output: 128 67 | pad: 0 68 | kernel_size: 1 69 | stride: 1 70 | bias_term: false 71 | } 72 | } 73 | 74 | layer{ 75 | name: "pre_res_1/branch1/conv1_1x1/bn" 76 | type: "BN" 77 | bottom: "pre_res_1/branch1/conv1_1x1" 78 | top: "pre_res_1/branch1/conv1_1x1/bn" 79 | bn_param { 80 | frozen: true 81 | } 82 | } 83 | 84 | layer{ 85 | name: "pre_res_1/branch1/conv1_1x1/bn/relu" 86 | type: "ReLU" 87 | bottom: "pre_res_1/branch1/conv1_1x1/bn" 88 | top: "pre_res_1/branch1/conv1_1x1/bn" 89 | } 90 | 91 | layer{ 92 | name: "pre_res_1/branch1/conv2_3x3" 93 | type: "Convolution" 94 | bottom: "pre_res_1/branch1/conv1_1x1/bn" 95 | top: "pre_res_1/branch1/conv2_3x3" 96 | param{ 97 | lr_mult: 1 98 | decay_mult: 1 99 | } 100 | convolution_param { 101 | num_output: 128 102 | pad: 1 103 | kernel_size: 3 104 | stride: 1 105 | group: 32 106 | bias_term: false 107 | } 108 | } 109 | 110 | layer{ 111 | name: "pre_res_1/branch1/conv2_3x3/bn" 112 | type: "BN" 113 | bottom: "pre_res_1/branch1/conv2_3x3" 114 | top: "pre_res_1/branch1/conv2_3x3/bn" 115 | bn_param { 116 | frozen: true 117 | } 118 | } 119 | 120 | layer{ 121 | name: "pre_res_1/branch1/conv2_3x3/bn/relu" 122 | type: "ReLU" 123 | bottom: "pre_res_1/branch1/conv2_3x3/bn" 124 | top: "pre_res_1/branch1/conv2_3x3/bn" 125 | } 126 | 127 | layer{ 128 | name: "pre_res_1/branch1/conv3_1x1" 129 | type: "Convolution" 130 | bottom: "pre_res_1/branch1/conv2_3x3/bn" 131 | top: "pre_res_1/branch1/conv3_1x1" 
132 | param{ 133 | lr_mult: 1 134 | decay_mult: 1 135 | } 136 | convolution_param { 137 | num_output: 256 138 | pad: 0 139 | kernel_size: 1 140 | stride: 1 141 | bias_term: false 142 | } 143 | } 144 | 145 | layer{ 146 | name: "pre_res_1/branch1/conv3_1x1/bn" 147 | type: "BN" 148 | bottom: "pre_res_1/branch1/conv3_1x1" 149 | top: "pre_res_1/branch1/conv3_1x1/bn" 150 | bn_param { 151 | frozen: true 152 | } 153 | } 154 | 155 | layer{ 156 | name: "pre_res_1/branch2/conv1_1x1" 157 | type: "Convolution" 158 | bottom: "pool1_3x3_s2" 159 | top: "pre_res_1/branch2/conv1_1x1" 160 | param{ 161 | lr_mult: 1 162 | decay_mult: 1 163 | } 164 | convolution_param { 165 | num_output: 256 166 | pad: 0 167 | kernel_size: 1 168 | stride: 1 169 | bias_term: false 170 | } 171 | } 172 | 173 | layer{ 174 | name: "pre_res_1/branch2/conv1_1x1/bn" 175 | type: "BN" 176 | bottom: "pre_res_1/branch2/conv1_1x1" 177 | top: "pre_res_1/branch2/conv1_1x1/bn" 178 | bn_param { 179 | frozen: true 180 | } 181 | } 182 | 183 | layer{ 184 | name: "pre_res_1" 185 | type: "Eltwise" 186 | bottom: "pre_res_1/branch2/conv1_1x1/bn" 187 | bottom: "pre_res_1/branch1/conv3_1x1/bn" 188 | top: "pre_res_1" 189 | eltwise_param { 190 | operation: SUM 191 | } 192 | } 193 | 194 | layer{ 195 | name: "pre_res_1/relu" 196 | type: "ReLU" 197 | bottom: "pre_res_1" 198 | top: "pre_res_1" 199 | } 200 | 201 | layer{ 202 | name: "AttentionA_1/trunk/res1/branch1/conv1_1x1" 203 | type: "Convolution" 204 | bottom: "pre_res_1" 205 | top: "AttentionA_1/trunk/res1/branch1/conv1_1x1" 206 | param{ 207 | lr_mult: 1 208 | decay_mult: 1 209 | } 210 | convolution_param { 211 | num_output: 128 212 | pad: 0 213 | kernel_size: 1 214 | stride: 1 215 | bias_term: false 216 | } 217 | } 218 | 219 | layer{ 220 | name: "AttentionA_1/trunk/res1/branch1/conv1_1x1/bn" 221 | type: "BN" 222 | bottom: "AttentionA_1/trunk/res1/branch1/conv1_1x1" 223 | top: "AttentionA_1/trunk/res1/branch1/conv1_1x1/bn" 224 | bn_param { 225 | frozen: true 226 | } 227 | } 228 | 229 | layer{ 230 | name: "AttentionA_1/trunk/res1/branch1/conv1_1x1/bn/relu" 231 | type: "ReLU" 232 | bottom: "AttentionA_1/trunk/res1/branch1/conv1_1x1/bn" 233 | top: "AttentionA_1/trunk/res1/branch1/conv1_1x1/bn" 234 | } 235 | 236 | layer{ 237 | name: "AttentionA_1/trunk/res1/branch1/conv2_3x3" 238 | type: "Convolution" 239 | bottom: "AttentionA_1/trunk/res1/branch1/conv1_1x1/bn" 240 | top: "AttentionA_1/trunk/res1/branch1/conv2_3x3" 241 | param{ 242 | lr_mult: 1 243 | decay_mult: 1 244 | } 245 | convolution_param { 246 | num_output: 128 247 | pad: 1 248 | kernel_size: 3 249 | stride: 1 250 | group: 32 251 | bias_term: false 252 | } 253 | } 254 | 255 | layer{ 256 | name: "AttentionA_1/trunk/res1/branch1/conv2_3x3/bn" 257 | type: "BN" 258 | bottom: "AttentionA_1/trunk/res1/branch1/conv2_3x3" 259 | top: "AttentionA_1/trunk/res1/branch1/conv2_3x3/bn" 260 | bn_param { 261 | frozen: true 262 | } 263 | } 264 | 265 | layer{ 266 | name: "AttentionA_1/trunk/res1/branch1/conv2_3x3/bn/relu" 267 | type: "ReLU" 268 | bottom: "AttentionA_1/trunk/res1/branch1/conv2_3x3/bn" 269 | top: "AttentionA_1/trunk/res1/branch1/conv2_3x3/bn" 270 | } 271 | 272 | layer{ 273 | name: "AttentionA_1/trunk/res1/branch1/conv3_1x1" 274 | type: "Convolution" 275 | bottom: "AttentionA_1/trunk/res1/branch1/conv2_3x3/bn" 276 | top: "AttentionA_1/trunk/res1/branch1/conv3_1x1" 277 | param{ 278 | lr_mult: 1 279 | decay_mult: 1 280 | } 281 | convolution_param { 282 | num_output: 256 283 | pad: 0 284 | kernel_size: 1 285 | stride: 1 286 | bias_term: false 287 | } 288 | } 
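# Annotation (added for clarity; not in the released prototxt): relative to Attention-56, the AttentionNeXt units double the bottleneck width (128 here vs. 64) and give every 3x3 convolution "group: 32", i.e. ResNeXt aggregated transformations; BN is also applied per branch before each Eltwise SUM rather than in the pre-activation position.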
289 | 290 | layer{ 291 | name: "AttentionA_1/trunk/res1/branch1/conv3_1x1/bn" 292 | type: "BN" 293 | bottom: "AttentionA_1/trunk/res1/branch1/conv3_1x1" 294 | top: "AttentionA_1/trunk/res1/branch1/conv3_1x1/bn" 295 | bn_param { 296 | frozen: true 297 | } 298 | } 299 | 300 | layer{ 301 | name: "AttentionA_1/trunk/res1" 302 | type: "Eltwise" 303 | bottom: "AttentionA_1/trunk/res1/branch1/conv3_1x1/bn" 304 | bottom: "pre_res_1" 305 | top: "AttentionA_1/trunk/res1" 306 | eltwise_param { 307 | operation: SUM 308 | } 309 | } 310 | 311 | layer{ 312 | name: "AttentionA_1/trunk/res1/relu" 313 | type: "ReLU" 314 | bottom: "AttentionA_1/trunk/res1" 315 | top: "AttentionA_1/trunk/res1" 316 | } 317 | 318 | layer{ 319 | name: "AttentionA_1/trunk/res2/branch1/conv1_1x1" 320 | type: "Convolution" 321 | bottom: "AttentionA_1/trunk/res1" 322 | top: "AttentionA_1/trunk/res2/branch1/conv1_1x1" 323 | param{ 324 | lr_mult: 1 325 | decay_mult: 1 326 | } 327 | convolution_param { 328 | num_output: 128 329 | pad: 0 330 | kernel_size: 1 331 | stride: 1 332 | bias_term: false 333 | } 334 | } 335 | 336 | layer{ 337 | name: "AttentionA_1/trunk/res2/branch1/conv1_1x1/bn" 338 | type: "BN" 339 | bottom: "AttentionA_1/trunk/res2/branch1/conv1_1x1" 340 | top: "AttentionA_1/trunk/res2/branch1/conv1_1x1/bn" 341 | bn_param { 342 | frozen: true 343 | } 344 | } 345 | 346 | layer{ 347 | name: "AttentionA_1/trunk/res2/branch1/conv1_1x1/bn/relu" 348 | type: "ReLU" 349 | bottom: "AttentionA_1/trunk/res2/branch1/conv1_1x1/bn" 350 | top: "AttentionA_1/trunk/res2/branch1/conv1_1x1/bn" 351 | } 352 | 353 | layer{ 354 | name: "AttentionA_1/trunk/res2/branch1/conv2_3x3" 355 | type: "Convolution" 356 | bottom: "AttentionA_1/trunk/res2/branch1/conv1_1x1/bn" 357 | top: "AttentionA_1/trunk/res2/branch1/conv2_3x3" 358 | param{ 359 | lr_mult: 1 360 | decay_mult: 1 361 | } 362 | convolution_param { 363 | num_output: 128 364 | pad: 1 365 | kernel_size: 3 366 | stride: 1 367 | group: 32 368 | bias_term: false 369 | } 370 | } 371 | 372 | layer{ 373 | name: "AttentionA_1/trunk/res2/branch1/conv2_3x3/bn" 374 | type: "BN" 375 | bottom: "AttentionA_1/trunk/res2/branch1/conv2_3x3" 376 | top: "AttentionA_1/trunk/res2/branch1/conv2_3x3/bn" 377 | bn_param { 378 | frozen: true 379 | } 380 | } 381 | 382 | layer{ 383 | name: "AttentionA_1/trunk/res2/branch1/conv2_3x3/bn/relu" 384 | type: "ReLU" 385 | bottom: "AttentionA_1/trunk/res2/branch1/conv2_3x3/bn" 386 | top: "AttentionA_1/trunk/res2/branch1/conv2_3x3/bn" 387 | } 388 | 389 | layer{ 390 | name: "AttentionA_1/trunk/res2/branch1/conv3_1x1" 391 | type: "Convolution" 392 | bottom: "AttentionA_1/trunk/res2/branch1/conv2_3x3/bn" 393 | top: "AttentionA_1/trunk/res2/branch1/conv3_1x1" 394 | param{ 395 | lr_mult: 1 396 | decay_mult: 1 397 | } 398 | convolution_param { 399 | num_output: 256 400 | pad: 0 401 | kernel_size: 1 402 | stride: 1 403 | bias_term: false 404 | } 405 | } 406 | 407 | layer{ 408 | name: "AttentionA_1/trunk/res2/branch1/conv3_1x1/bn" 409 | type: "BN" 410 | bottom: "AttentionA_1/trunk/res2/branch1/conv3_1x1" 411 | top: "AttentionA_1/trunk/res2/branch1/conv3_1x1/bn" 412 | bn_param { 413 | frozen: true 414 | } 415 | } 416 | 417 | layer{ 418 | name: "AttentionA_1/trunk/res2" 419 | type: "Eltwise" 420 | bottom: "AttentionA_1/trunk/res2/branch1/conv3_1x1/bn" 421 | bottom: "AttentionA_1/trunk/res1" 422 | top: "AttentionA_1/trunk/res2" 423 | eltwise_param { 424 | operation: SUM 425 | } 426 | } 427 | 428 | layer{ 429 | name: "AttentionA_1/trunk/res2/relu" 430 | type: "ReLU" 431 | bottom: 
"AttentionA_1/trunk/res2" 432 | top: "AttentionA_1/trunk/res2" 433 | } 434 | 435 | layer{ 436 | name: "AttentionA_1/trunk/res3/branch1/conv1_1x1" 437 | type: "Convolution" 438 | bottom: "AttentionA_1/trunk/res2" 439 | top: "AttentionA_1/trunk/res3/branch1/conv1_1x1" 440 | param{ 441 | lr_mult: 1 442 | decay_mult: 1 443 | } 444 | convolution_param { 445 | num_output: 128 446 | pad: 0 447 | kernel_size: 1 448 | stride: 1 449 | bias_term: false 450 | } 451 | } 452 | 453 | layer{ 454 | name: "AttentionA_1/trunk/res3/branch1/conv1_1x1/bn" 455 | type: "BN" 456 | bottom: "AttentionA_1/trunk/res3/branch1/conv1_1x1" 457 | top: "AttentionA_1/trunk/res3/branch1/conv1_1x1/bn" 458 | bn_param { 459 | frozen: true 460 | } 461 | } 462 | 463 | layer{ 464 | name: "AttentionA_1/trunk/res3/branch1/conv1_1x1/bn/relu" 465 | type: "ReLU" 466 | bottom: "AttentionA_1/trunk/res3/branch1/conv1_1x1/bn" 467 | top: "AttentionA_1/trunk/res3/branch1/conv1_1x1/bn" 468 | } 469 | 470 | layer{ 471 | name: "AttentionA_1/trunk/res3/branch1/conv2_3x3" 472 | type: "Convolution" 473 | bottom: "AttentionA_1/trunk/res3/branch1/conv1_1x1/bn" 474 | top: "AttentionA_1/trunk/res3/branch1/conv2_3x3" 475 | param{ 476 | lr_mult: 1 477 | decay_mult: 1 478 | } 479 | convolution_param { 480 | num_output: 128 481 | pad: 1 482 | kernel_size: 3 483 | stride: 1 484 | group: 32 485 | bias_term: false 486 | } 487 | } 488 | 489 | layer{ 490 | name: "AttentionA_1/trunk/res3/branch1/conv2_3x3/bn" 491 | type: "BN" 492 | bottom: "AttentionA_1/trunk/res3/branch1/conv2_3x3" 493 | top: "AttentionA_1/trunk/res3/branch1/conv2_3x3/bn" 494 | bn_param { 495 | frozen: true 496 | } 497 | } 498 | 499 | layer{ 500 | name: "AttentionA_1/trunk/res3/branch1/conv2_3x3/bn/relu" 501 | type: "ReLU" 502 | bottom: "AttentionA_1/trunk/res3/branch1/conv2_3x3/bn" 503 | top: "AttentionA_1/trunk/res3/branch1/conv2_3x3/bn" 504 | } 505 | 506 | layer{ 507 | name: "AttentionA_1/trunk/res3/branch1/conv3_1x1" 508 | type: "Convolution" 509 | bottom: "AttentionA_1/trunk/res3/branch1/conv2_3x3/bn" 510 | top: "AttentionA_1/trunk/res3/branch1/conv3_1x1" 511 | param{ 512 | lr_mult: 1 513 | decay_mult: 1 514 | } 515 | convolution_param { 516 | num_output: 256 517 | pad: 0 518 | kernel_size: 1 519 | stride: 1 520 | bias_term: false 521 | } 522 | } 523 | 524 | layer{ 525 | name: "AttentionA_1/trunk/res3/branch1/conv3_1x1/bn" 526 | type: "BN" 527 | bottom: "AttentionA_1/trunk/res3/branch1/conv3_1x1" 528 | top: "AttentionA_1/trunk/res3/branch1/conv3_1x1/bn" 529 | bn_param { 530 | frozen: true 531 | } 532 | } 533 | 534 | layer{ 535 | name: "AttentionA_1/trunk/res3" 536 | type: "Eltwise" 537 | bottom: "AttentionA_1/trunk/res3/branch1/conv3_1x1/bn" 538 | bottom: "AttentionA_1/trunk/res2" 539 | top: "AttentionA_1/trunk/res3" 540 | eltwise_param { 541 | operation: SUM 542 | } 543 | } 544 | 545 | layer{ 546 | name: "AttentionA_1/trunk/res3/relu" 547 | type: "ReLU" 548 | bottom: "AttentionA_1/trunk/res3" 549 | top: "AttentionA_1/trunk/res3" 550 | } 551 | 552 | layer{ 553 | name: "AttentionA_1/mask/down/pool1_3x3_s2" 554 | type: "Pooling" 555 | bottom: "AttentionA_1/trunk/res1" 556 | top: "AttentionA_1/mask/down/pool1_3x3_s2" 557 | pooling_param { 558 | pool: MAX 559 | kernel_size: 3 560 | stride: 2 561 | } 562 | } 563 | 564 | layer{ 565 | name: "AttentionA_1/mask/down/res1_1/branch1/conv1_1x1" 566 | type: "Convolution" 567 | bottom: "AttentionA_1/mask/down/pool1_3x3_s2" 568 | top: "AttentionA_1/mask/down/res1_1/branch1/conv1_1x1" 569 | param{ 570 | lr_mult: 1 571 | decay_mult: 1 572 | } 573 | 
convolution_param { 574 | num_output: 128 575 | pad: 0 576 | kernel_size: 1 577 | stride: 1 578 | bias_term: false 579 | } 580 | } 581 | 582 | layer{ 583 | name: "AttentionA_1/mask/down/res1_1/branch1/conv1_1x1/bn" 584 | type: "BN" 585 | bottom: "AttentionA_1/mask/down/res1_1/branch1/conv1_1x1" 586 | top: "AttentionA_1/mask/down/res1_1/branch1/conv1_1x1/bn" 587 | bn_param { 588 | frozen: true 589 | } 590 | } 591 | 592 | layer{ 593 | name: "AttentionA_1/mask/down/res1_1/branch1/conv1_1x1/bn/relu" 594 | type: "ReLU" 595 | bottom: "AttentionA_1/mask/down/res1_1/branch1/conv1_1x1/bn" 596 | top: "AttentionA_1/mask/down/res1_1/branch1/conv1_1x1/bn" 597 | } 598 | 599 | layer{ 600 | name: "AttentionA_1/mask/down/res1_1/branch1/conv2_3x3" 601 | type: "Convolution" 602 | bottom: "AttentionA_1/mask/down/res1_1/branch1/conv1_1x1/bn" 603 | top: "AttentionA_1/mask/down/res1_1/branch1/conv2_3x3" 604 | param{ 605 | lr_mult: 1 606 | decay_mult: 1 607 | } 608 | convolution_param { 609 | num_output: 128 610 | pad: 1 611 | kernel_size: 3 612 | stride: 1 613 | group: 32 614 | bias_term: false 615 | } 616 | } 617 | 618 | layer{ 619 | name: "AttentionA_1/mask/down/res1_1/branch1/conv2_3x3/bn" 620 | type: "BN" 621 | bottom: "AttentionA_1/mask/down/res1_1/branch1/conv2_3x3" 622 | top: "AttentionA_1/mask/down/res1_1/branch1/conv2_3x3/bn" 623 | bn_param { 624 | frozen: true 625 | } 626 | } 627 | 628 | layer{ 629 | name: "AttentionA_1/mask/down/res1_1/branch1/conv2_3x3/bn/relu" 630 | type: "ReLU" 631 | bottom: "AttentionA_1/mask/down/res1_1/branch1/conv2_3x3/bn" 632 | top: "AttentionA_1/mask/down/res1_1/branch1/conv2_3x3/bn" 633 | } 634 | 635 | layer{ 636 | name: "AttentionA_1/mask/down/res1_1/branch1/conv3_1x1" 637 | type: "Convolution" 638 | bottom: "AttentionA_1/mask/down/res1_1/branch1/conv2_3x3/bn" 639 | top: "AttentionA_1/mask/down/res1_1/branch1/conv3_1x1" 640 | param{ 641 | lr_mult: 1 642 | decay_mult: 1 643 | } 644 | convolution_param { 645 | num_output: 256 646 | pad: 0 647 | kernel_size: 1 648 | stride: 1 649 | bias_term: false 650 | } 651 | } 652 | 653 | layer{ 654 | name: "AttentionA_1/mask/down/res1_1/branch1/conv3_1x1/bn" 655 | type: "BN" 656 | bottom: "AttentionA_1/mask/down/res1_1/branch1/conv3_1x1" 657 | top: "AttentionA_1/mask/down/res1_1/branch1/conv3_1x1/bn" 658 | bn_param { 659 | frozen: true 660 | } 661 | } 662 | 663 | layer{ 664 | name: "AttentionA_1/mask/down/res1_1" 665 | type: "Eltwise" 666 | bottom: "AttentionA_1/mask/down/res1_1/branch1/conv3_1x1/bn" 667 | bottom: "AttentionA_1/mask/down/pool1_3x3_s2" 668 | top: "AttentionA_1/mask/down/res1_1" 669 | eltwise_param { 670 | operation: SUM 671 | } 672 | } 673 | 674 | layer{ 675 | name: "AttentionA_1/mask/down/res1_1/relu" 676 | type: "ReLU" 677 | bottom: "AttentionA_1/mask/down/res1_1" 678 | top: "AttentionA_1/mask/down/res1_1" 679 | } 680 | 681 | layer{ 682 | name: "AttentionA_1/mask/down/pool2_3x3_s2" 683 | type: "Pooling" 684 | bottom: "AttentionA_1/mask/down/res1_1" 685 | top: "AttentionA_1/mask/down/pool2_3x3_s2" 686 | pooling_param { 687 | pool: MAX 688 | kernel_size: 3 689 | stride: 2 690 | } 691 | } 692 | 693 | layer{ 694 | name: "AttentionA_1/mask/down/res2_1/branch1/conv1_1x1" 695 | type: "Convolution" 696 | bottom: "AttentionA_1/mask/down/pool2_3x3_s2" 697 | top: "AttentionA_1/mask/down/res2_1/branch1/conv1_1x1" 698 | param{ 699 | lr_mult: 1 700 | decay_mult: 1 701 | } 702 | convolution_param { 703 | num_output: 128 704 | pad: 0 705 | kernel_size: 1 706 | stride: 1 707 | bias_term: false 708 | } 709 | } 710 | 711 | layer{ 712 | 
name: "AttentionA_1/mask/down/res2_1/branch1/conv1_1x1/bn" 713 | type: "BN" 714 | bottom: "AttentionA_1/mask/down/res2_1/branch1/conv1_1x1" 715 | top: "AttentionA_1/mask/down/res2_1/branch1/conv1_1x1/bn" 716 | bn_param { 717 | frozen: true 718 | } 719 | } 720 | 721 | layer{ 722 | name: "AttentionA_1/mask/down/res2_1/branch1/conv1_1x1/bn/relu" 723 | type: "ReLU" 724 | bottom: "AttentionA_1/mask/down/res2_1/branch1/conv1_1x1/bn" 725 | top: "AttentionA_1/mask/down/res2_1/branch1/conv1_1x1/bn" 726 | } 727 | 728 | layer{ 729 | name: "AttentionA_1/mask/down/res2_1/branch1/conv2_3x3" 730 | type: "Convolution" 731 | bottom: "AttentionA_1/mask/down/res2_1/branch1/conv1_1x1/bn" 732 | top: "AttentionA_1/mask/down/res2_1/branch1/conv2_3x3" 733 | param{ 734 | lr_mult: 1 735 | decay_mult: 1 736 | } 737 | convolution_param { 738 | num_output: 128 739 | pad: 1 740 | kernel_size: 3 741 | stride: 1 742 | group: 32 743 | bias_term: false 744 | } 745 | } 746 | 747 | layer{ 748 | name: "AttentionA_1/mask/down/res2_1/branch1/conv2_3x3/bn" 749 | type: "BN" 750 | bottom: "AttentionA_1/mask/down/res2_1/branch1/conv2_3x3" 751 | top: "AttentionA_1/mask/down/res2_1/branch1/conv2_3x3/bn" 752 | bn_param { 753 | frozen: true 754 | } 755 | } 756 | 757 | layer{ 758 | name: "AttentionA_1/mask/down/res2_1/branch1/conv2_3x3/bn/relu" 759 | type: "ReLU" 760 | bottom: "AttentionA_1/mask/down/res2_1/branch1/conv2_3x3/bn" 761 | top: "AttentionA_1/mask/down/res2_1/branch1/conv2_3x3/bn" 762 | } 763 | 764 | layer{ 765 | name: "AttentionA_1/mask/down/res2_1/branch1/conv3_1x1" 766 | type: "Convolution" 767 | bottom: "AttentionA_1/mask/down/res2_1/branch1/conv2_3x3/bn" 768 | top: "AttentionA_1/mask/down/res2_1/branch1/conv3_1x1" 769 | param{ 770 | lr_mult: 1 771 | decay_mult: 1 772 | } 773 | convolution_param { 774 | num_output: 256 775 | pad: 0 776 | kernel_size: 1 777 | stride: 1 778 | bias_term: false 779 | } 780 | } 781 | 782 | layer{ 783 | name: "AttentionA_1/mask/down/res2_1/branch1/conv3_1x1/bn" 784 | type: "BN" 785 | bottom: "AttentionA_1/mask/down/res2_1/branch1/conv3_1x1" 786 | top: "AttentionA_1/mask/down/res2_1/branch1/conv3_1x1/bn" 787 | bn_param { 788 | frozen: true 789 | } 790 | } 791 | 792 | layer{ 793 | name: "AttentionA_1/mask/down/res2_1" 794 | type: "Eltwise" 795 | bottom: "AttentionA_1/mask/down/res2_1/branch1/conv3_1x1/bn" 796 | bottom: "AttentionA_1/mask/down/pool2_3x3_s2" 797 | top: "AttentionA_1/mask/down/res2_1" 798 | eltwise_param { 799 | operation: SUM 800 | } 801 | } 802 | 803 | layer{ 804 | name: "AttentionA_1/mask/down/res2_1/relu" 805 | type: "ReLU" 806 | bottom: "AttentionA_1/mask/down/res2_1" 807 | top: "AttentionA_1/mask/down/res2_1" 808 | } 809 | 810 | layer{ 811 | name: "AttentionA_1/mask/down/pool3_3x3_s2" 812 | type: "Pooling" 813 | bottom: "AttentionA_1/mask/down/res2_1" 814 | top: "AttentionA_1/mask/down/pool3_3x3_s2" 815 | pooling_param { 816 | pool: MAX 817 | kernel_size: 3 818 | stride: 2 819 | } 820 | } 821 | 822 | layer{ 823 | name: "AttentionA_1/mask/down/res3_1/branch1/conv1_1x1" 824 | type: "Convolution" 825 | bottom: "AttentionA_1/mask/down/pool3_3x3_s2" 826 | top: "AttentionA_1/mask/down/res3_1/branch1/conv1_1x1" 827 | param{ 828 | lr_mult: 1 829 | decay_mult: 1 830 | } 831 | convolution_param { 832 | num_output: 128 833 | pad: 0 834 | kernel_size: 1 835 | stride: 1 836 | bias_term: false 837 | } 838 | } 839 | 840 | layer{ 841 | name: "AttentionA_1/mask/down/res3_1/branch1/conv1_1x1/bn" 842 | type: "BN" 843 | bottom: "AttentionA_1/mask/down/res3_1/branch1/conv1_1x1" 844 | top: 
"AttentionA_1/mask/down/res3_1/branch1/conv1_1x1/bn" 845 | bn_param { 846 | frozen: true 847 | } 848 | } 849 | 850 | layer{ 851 | name: "AttentionA_1/mask/down/res3_1/branch1/conv1_1x1/bn/relu" 852 | type: "ReLU" 853 | bottom: "AttentionA_1/mask/down/res3_1/branch1/conv1_1x1/bn" 854 | top: "AttentionA_1/mask/down/res3_1/branch1/conv1_1x1/bn" 855 | } 856 | 857 | layer{ 858 | name: "AttentionA_1/mask/down/res3_1/branch1/conv2_3x3" 859 | type: "Convolution" 860 | bottom: "AttentionA_1/mask/down/res3_1/branch1/conv1_1x1/bn" 861 | top: "AttentionA_1/mask/down/res3_1/branch1/conv2_3x3" 862 | param{ 863 | lr_mult: 1 864 | decay_mult: 1 865 | } 866 | convolution_param { 867 | num_output: 128 868 | pad: 1 869 | kernel_size: 3 870 | stride: 1 871 | group: 32 872 | bias_term: false 873 | } 874 | } 875 | 876 | layer{ 877 | name: "AttentionA_1/mask/down/res3_1/branch1/conv2_3x3/bn" 878 | type: "BN" 879 | bottom: "AttentionA_1/mask/down/res3_1/branch1/conv2_3x3" 880 | top: "AttentionA_1/mask/down/res3_1/branch1/conv2_3x3/bn" 881 | bn_param { 882 | frozen: true 883 | } 884 | } 885 | 886 | layer{ 887 | name: "AttentionA_1/mask/down/res3_1/branch1/conv2_3x3/bn/relu" 888 | type: "ReLU" 889 | bottom: "AttentionA_1/mask/down/res3_1/branch1/conv2_3x3/bn" 890 | top: "AttentionA_1/mask/down/res3_1/branch1/conv2_3x3/bn" 891 | } 892 | 893 | layer{ 894 | name: "AttentionA_1/mask/down/res3_1/branch1/conv3_1x1" 895 | type: "Convolution" 896 | bottom: "AttentionA_1/mask/down/res3_1/branch1/conv2_3x3/bn" 897 | top: "AttentionA_1/mask/down/res3_1/branch1/conv3_1x1" 898 | param{ 899 | lr_mult: 1 900 | decay_mult: 1 901 | } 902 | convolution_param { 903 | num_output: 256 904 | pad: 0 905 | kernel_size: 1 906 | stride: 1 907 | bias_term: false 908 | } 909 | } 910 | 911 | layer{ 912 | name: "AttentionA_1/mask/down/res3_1/branch1/conv3_1x1/bn" 913 | type: "BN" 914 | bottom: "AttentionA_1/mask/down/res3_1/branch1/conv3_1x1" 915 | top: "AttentionA_1/mask/down/res3_1/branch1/conv3_1x1/bn" 916 | bn_param { 917 | frozen: true 918 | } 919 | } 920 | 921 | layer{ 922 | name: "AttentionA_1/mask/down/res3_1" 923 | type: "Eltwise" 924 | bottom: "AttentionA_1/mask/down/res3_1/branch1/conv3_1x1/bn" 925 | bottom: "AttentionA_1/mask/down/pool3_3x3_s2" 926 | top: "AttentionA_1/mask/down/res3_1" 927 | eltwise_param { 928 | operation: SUM 929 | } 930 | } 931 | 932 | layer{ 933 | name: "AttentionA_1/mask/down/res3_1/relu" 934 | type: "ReLU" 935 | bottom: "AttentionA_1/mask/down/res3_1" 936 | top: "AttentionA_1/mask/down/res3_1" 937 | } 938 | 939 | layer{ 940 | name: "AttentionA_1/mask/down/res3_2/branch1/conv1_1x1" 941 | type: "Convolution" 942 | bottom: "AttentionA_1/mask/down/res3_1" 943 | top: "AttentionA_1/mask/down/res3_2/branch1/conv1_1x1" 944 | param{ 945 | lr_mult: 1 946 | decay_mult: 1 947 | } 948 | convolution_param { 949 | num_output: 128 950 | pad: 0 951 | kernel_size: 1 952 | stride: 1 953 | bias_term: false 954 | } 955 | } 956 | 957 | layer{ 958 | name: "AttentionA_1/mask/down/res3_2/branch1/conv1_1x1/bn" 959 | type: "BN" 960 | bottom: "AttentionA_1/mask/down/res3_2/branch1/conv1_1x1" 961 | top: "AttentionA_1/mask/down/res3_2/branch1/conv1_1x1/bn" 962 | bn_param { 963 | frozen: true 964 | } 965 | } 966 | 967 | layer{ 968 | name: "AttentionA_1/mask/down/res3_2/branch1/conv1_1x1/bn/relu" 969 | type: "ReLU" 970 | bottom: "AttentionA_1/mask/down/res3_2/branch1/conv1_1x1/bn" 971 | top: "AttentionA_1/mask/down/res3_2/branch1/conv1_1x1/bn" 972 | } 973 | 974 | layer{ 975 | name: "AttentionA_1/mask/down/res3_2/branch1/conv2_3x3" 976 | 
type: "Convolution" 977 | bottom: "AttentionA_1/mask/down/res3_2/branch1/conv1_1x1/bn" 978 | top: "AttentionA_1/mask/down/res3_2/branch1/conv2_3x3" 979 | param{ 980 | lr_mult: 1 981 | decay_mult: 1 982 | } 983 | convolution_param { 984 | num_output: 128 985 | pad: 1 986 | kernel_size: 3 987 | stride: 1 988 | group: 32 989 | bias_term: false 990 | } 991 | } 992 | 993 | layer{ 994 | name: "AttentionA_1/mask/down/res3_2/branch1/conv2_3x3/bn" 995 | type: "BN" 996 | bottom: "AttentionA_1/mask/down/res3_2/branch1/conv2_3x3" 997 | top: "AttentionA_1/mask/down/res3_2/branch1/conv2_3x3/bn" 998 | bn_param { 999 | frozen: true 1000 | } 1001 | } 1002 | 1003 | layer{ 1004 | name: "AttentionA_1/mask/down/res3_2/branch1/conv2_3x3/bn/relu" 1005 | type: "ReLU" 1006 | bottom: "AttentionA_1/mask/down/res3_2/branch1/conv2_3x3/bn" 1007 | top: "AttentionA_1/mask/down/res3_2/branch1/conv2_3x3/bn" 1008 | } 1009 | 1010 | layer{ 1011 | name: "AttentionA_1/mask/down/res3_2/branch1/conv3_1x1" 1012 | type: "Convolution" 1013 | bottom: "AttentionA_1/mask/down/res3_2/branch1/conv2_3x3/bn" 1014 | top: "AttentionA_1/mask/down/res3_2/branch1/conv3_1x1" 1015 | param{ 1016 | lr_mult: 1 1017 | decay_mult: 1 1018 | } 1019 | convolution_param { 1020 | num_output: 256 1021 | pad: 0 1022 | kernel_size: 1 1023 | stride: 1 1024 | bias_term: false 1025 | } 1026 | } 1027 | 1028 | layer{ 1029 | name: "AttentionA_1/mask/down/res3_2/branch1/conv3_1x1/bn" 1030 | type: "BN" 1031 | bottom: "AttentionA_1/mask/down/res3_2/branch1/conv3_1x1" 1032 | top: "AttentionA_1/mask/down/res3_2/branch1/conv3_1x1/bn" 1033 | bn_param { 1034 | frozen: true 1035 | } 1036 | } 1037 | 1038 | layer{ 1039 | name: "AttentionA_1/mask/down/res3_2" 1040 | type: "Eltwise" 1041 | bottom: "AttentionA_1/mask/down/res3_2/branch1/conv3_1x1/bn" 1042 | bottom: "AttentionA_1/mask/down/res3_1" 1043 | top: "AttentionA_1/mask/down/res3_2" 1044 | eltwise_param { 1045 | operation: SUM 1046 | } 1047 | } 1048 | 1049 | layer{ 1050 | name: "AttentionA_1/mask/down/res3_2/relu" 1051 | type: "ReLU" 1052 | bottom: "AttentionA_1/mask/down/res3_2" 1053 | top: "AttentionA_1/mask/down/res3_2" 1054 | } 1055 | 1056 | layer{ 1057 | name: "AttentionA_1/mask/up/interp_3" 1058 | type: "Interp" 1059 | bottom: "AttentionA_1/mask/down/res3_2" 1060 | bottom: "AttentionA_1/mask/down/res2_1" 1061 | top: "AttentionA_1/mask/up/interp_3" 1062 | } 1063 | 1064 | layer{ 1065 | name: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1" 1066 | type: "Convolution" 1067 | bottom: "AttentionA_1/mask/down/res2_1" 1068 | top: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1" 1069 | param{ 1070 | lr_mult: 1 1071 | decay_mult: 1 1072 | } 1073 | convolution_param { 1074 | num_output: 128 1075 | pad: 0 1076 | kernel_size: 1 1077 | stride: 1 1078 | bias_term: false 1079 | } 1080 | } 1081 | 1082 | layer{ 1083 | name: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1/bn" 1084 | type: "BN" 1085 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1" 1086 | top: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1/bn" 1087 | bn_param { 1088 | frozen: true 1089 | } 1090 | } 1091 | 1092 | layer{ 1093 | name: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1/bn/relu" 1094 | type: "ReLU" 1095 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1/bn" 1096 | top: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1/bn" 1097 | } 1098 | 1099 | layer{ 1100 | name: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3" 1101 | type: "Convolution" 1102 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv1_1x1/bn" 1103 | top: 
"AttentionA_1/mask/skip/res2/branch1/conv2_3x3" 1104 | param{ 1105 | lr_mult: 1 1106 | decay_mult: 1 1107 | } 1108 | convolution_param { 1109 | num_output: 128 1110 | pad: 1 1111 | kernel_size: 3 1112 | stride: 1 1113 | group: 32 1114 | bias_term: false 1115 | } 1116 | } 1117 | 1118 | layer{ 1119 | name: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3/bn" 1120 | type: "BN" 1121 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3" 1122 | top: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3/bn" 1123 | bn_param { 1124 | frozen: true 1125 | } 1126 | } 1127 | 1128 | layer{ 1129 | name: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3/bn/relu" 1130 | type: "ReLU" 1131 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3/bn" 1132 | top: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3/bn" 1133 | } 1134 | 1135 | layer{ 1136 | name: "AttentionA_1/mask/skip/res2/branch1/conv3_1x1" 1137 | type: "Convolution" 1138 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv2_3x3/bn" 1139 | top: "AttentionA_1/mask/skip/res2/branch1/conv3_1x1" 1140 | param{ 1141 | lr_mult: 1 1142 | decay_mult: 1 1143 | } 1144 | convolution_param { 1145 | num_output: 256 1146 | pad: 0 1147 | kernel_size: 1 1148 | stride: 1 1149 | bias_term: false 1150 | } 1151 | } 1152 | 1153 | layer{ 1154 | name: "AttentionA_1/mask/skip/res2/branch1/conv3_1x1/bn" 1155 | type: "BN" 1156 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv3_1x1" 1157 | top: "AttentionA_1/mask/skip/res2/branch1/conv3_1x1/bn" 1158 | bn_param { 1159 | frozen: true 1160 | } 1161 | } 1162 | 1163 | layer{ 1164 | name: "AttentionA_1/mask/skip/res2" 1165 | type: "Eltwise" 1166 | bottom: "AttentionA_1/mask/skip/res2/branch1/conv3_1x1/bn" 1167 | bottom: "AttentionA_1/mask/down/res2_1" 1168 | top: "AttentionA_1/mask/skip/res2" 1169 | eltwise_param { 1170 | operation: SUM 1171 | } 1172 | } 1173 | 1174 | layer{ 1175 | name: "AttentionA_1/mask/skip/res2/relu" 1176 | type: "ReLU" 1177 | bottom: "AttentionA_1/mask/skip/res2" 1178 | top: "AttentionA_1/mask/skip/res2" 1179 | } 1180 | 1181 | layer{ 1182 | name: "AttentionA_1/mask/up2" 1183 | type: "Eltwise" 1184 | bottom: "AttentionA_1/mask/skip/res2" 1185 | bottom: "AttentionA_1/mask/up/interp_3" 1186 | top: "AttentionA_1/mask/up2" 1187 | eltwise_param { 1188 | operation: SUM 1189 | } 1190 | } 1191 | 1192 | layer{ 1193 | name: "AttentionA_1/mask/up/res2_1/branch1/conv1_1x1" 1194 | type: "Convolution" 1195 | bottom: "AttentionA_1/mask/up2" 1196 | top: "AttentionA_1/mask/up/res2_1/branch1/conv1_1x1" 1197 | param{ 1198 | lr_mult: 1 1199 | decay_mult: 1 1200 | } 1201 | convolution_param { 1202 | num_output: 128 1203 | pad: 0 1204 | kernel_size: 1 1205 | stride: 1 1206 | bias_term: false 1207 | } 1208 | } 1209 | 1210 | layer{ 1211 | name: "AttentionA_1/mask/up/res2_1/branch1/conv1_1x1/bn" 1212 | type: "BN" 1213 | bottom: "AttentionA_1/mask/up/res2_1/branch1/conv1_1x1" 1214 | top: "AttentionA_1/mask/up/res2_1/branch1/conv1_1x1/bn" 1215 | bn_param { 1216 | frozen: true 1217 | } 1218 | } 1219 | 1220 | layer{ 1221 | name: "AttentionA_1/mask/up/res2_1/branch1/conv1_1x1/bn/relu" 1222 | type: "ReLU" 1223 | bottom: "AttentionA_1/mask/up/res2_1/branch1/conv1_1x1/bn" 1224 | top: "AttentionA_1/mask/up/res2_1/branch1/conv1_1x1/bn" 1225 | } 1226 | 1227 | layer{ 1228 | name: "AttentionA_1/mask/up/res2_1/branch1/conv2_3x3" 1229 | type: "Convolution" 1230 | bottom: "AttentionA_1/mask/up/res2_1/branch1/conv1_1x1/bn" 1231 | top: "AttentionA_1/mask/up/res2_1/branch1/conv2_3x3" 1232 | param{ 1233 | lr_mult: 1 1234 | decay_mult: 1 1235 | } 1236 | 
convolution_param { 1237 | num_output: 128 1238 | pad: 1 1239 | kernel_size: 3 1240 | stride: 1 1241 | group: 32 1242 | bias_term: false 1243 | } 1244 | } 1245 | 1246 | layer{ 1247 | name: "AttentionA_1/mask/up/res2_1/branch1/conv2_3x3/bn" 1248 | type: "BN" 1249 | bottom: "AttentionA_1/mask/up/res2_1/branch1/conv2_3x3" 1250 | top: "AttentionA_1/mask/up/res2_1/branch1/conv2_3x3/bn" 1251 | bn_param { 1252 | frozen: true 1253 | } 1254 | } 1255 | 1256 | layer{ 1257 | name: "AttentionA_1/mask/up/res2_1/branch1/conv2_3x3/bn/relu" 1258 | type: "ReLU" 1259 | bottom: "AttentionA_1/mask/up/res2_1/branch1/conv2_3x3/bn" 1260 | top: "AttentionA_1/mask/up/res2_1/branch1/conv2_3x3/bn" 1261 | } 1262 | 1263 | layer{ 1264 | name: "AttentionA_1/mask/up/res2_1/branch1/conv3_1x1" 1265 | type: "Convolution" 1266 | bottom: "AttentionA_1/mask/up/res2_1/branch1/conv2_3x3/bn" 1267 | top: "AttentionA_1/mask/up/res2_1/branch1/conv3_1x1" 1268 | param{ 1269 | lr_mult: 1 1270 | decay_mult: 1 1271 | } 1272 | convolution_param { 1273 | num_output: 256 1274 | pad: 0 1275 | kernel_size: 1 1276 | stride: 1 1277 | bias_term: false 1278 | } 1279 | } 1280 | 1281 | layer{ 1282 | name: "AttentionA_1/mask/up/res2_1/branch1/conv3_1x1/bn" 1283 | type: "BN" 1284 | bottom: "AttentionA_1/mask/up/res2_1/branch1/conv3_1x1" 1285 | top: "AttentionA_1/mask/up/res2_1/branch1/conv3_1x1/bn" 1286 | bn_param { 1287 | frozen: true 1288 | } 1289 | } 1290 | 1291 | layer{ 1292 | name: "AttentionA_1/mask/up/res2_1" 1293 | type: "Eltwise" 1294 | bottom: "AttentionA_1/mask/up/res2_1/branch1/conv3_1x1/bn" 1295 | bottom: "AttentionA_1/mask/up2" 1296 | top: "AttentionA_1/mask/up/res2_1" 1297 | eltwise_param { 1298 | operation: SUM 1299 | } 1300 | } 1301 | 1302 | layer{ 1303 | name: "AttentionA_1/mask/up/res2_1/relu" 1304 | type: "ReLU" 1305 | bottom: "AttentionA_1/mask/up/res2_1" 1306 | top: "AttentionA_1/mask/up/res2_1" 1307 | } 1308 | 1309 | layer{ 1310 | name: "AttentionA_1/mask/up/interp_2" 1311 | type: "Interp" 1312 | bottom: "AttentionA_1/mask/up/res2_1" 1313 | bottom: "AttentionA_1/mask/down/res1_1" 1314 | top: "AttentionA_1/mask/up/interp_2" 1315 | } 1316 | 1317 | layer{ 1318 | name: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1" 1319 | type: "Convolution" 1320 | bottom: "AttentionA_1/mask/down/res1_1" 1321 | top: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1" 1322 | param{ 1323 | lr_mult: 1 1324 | decay_mult: 1 1325 | } 1326 | convolution_param { 1327 | num_output: 128 1328 | pad: 0 1329 | kernel_size: 1 1330 | stride: 1 1331 | bias_term: false 1332 | } 1333 | } 1334 | 1335 | layer{ 1336 | name: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1/bn" 1337 | type: "BN" 1338 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1" 1339 | top: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1/bn" 1340 | bn_param { 1341 | frozen: true 1342 | } 1343 | } 1344 | 1345 | layer{ 1346 | name: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1/bn/relu" 1347 | type: "ReLU" 1348 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1/bn" 1349 | top: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1/bn" 1350 | } 1351 | 1352 | layer{ 1353 | name: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3" 1354 | type: "Convolution" 1355 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv1_1x1/bn" 1356 | top: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3" 1357 | param{ 1358 | lr_mult: 1 1359 | decay_mult: 1 1360 | } 1361 | convolution_param { 1362 | num_output: 128 1363 | pad: 1 1364 | kernel_size: 3 1365 | stride: 1 1366 | group: 32 1367 | bias_term: false 1368 | } 1369 | } 1370 | 
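# Soft mask branch of AttentionA_1: max pooling (down/pool*_3x3_s2) shrinks the
# feature map, residual units refine it, and "Interp" layers upsample it back
# stage by stage; at each resolution a skip unit (skip/res*) is fused into the
# upsampled path by element-wise SUM (up1, up2) before the sigmoid produces the
# final mask.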
1371 | layer{ 1372 | name: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3/bn" 1373 | type: "BN" 1374 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3" 1375 | top: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3/bn" 1376 | bn_param { 1377 | frozen: true 1378 | } 1379 | } 1380 | 1381 | layer{ 1382 | name: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3/bn/relu" 1383 | type: "ReLU" 1384 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3/bn" 1385 | top: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3/bn" 1386 | } 1387 | 1388 | layer{ 1389 | name: "AttentionA_1/mask/skip/res1/branch1/conv3_1x1" 1390 | type: "Convolution" 1391 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv2_3x3/bn" 1392 | top: "AttentionA_1/mask/skip/res1/branch1/conv3_1x1" 1393 | param{ 1394 | lr_mult: 1 1395 | decay_mult: 1 1396 | } 1397 | convolution_param { 1398 | num_output: 256 1399 | pad: 0 1400 | kernel_size: 1 1401 | stride: 1 1402 | bias_term: false 1403 | } 1404 | } 1405 | 1406 | layer{ 1407 | name: "AttentionA_1/mask/skip/res1/branch1/conv3_1x1/bn" 1408 | type: "BN" 1409 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv3_1x1" 1410 | top: "AttentionA_1/mask/skip/res1/branch1/conv3_1x1/bn" 1411 | bn_param { 1412 | frozen: true 1413 | } 1414 | } 1415 | 1416 | layer{ 1417 | name: "AttentionA_1/mask/skip/res1" 1418 | type: "Eltwise" 1419 | bottom: "AttentionA_1/mask/skip/res1/branch1/conv3_1x1/bn" 1420 | bottom: "AttentionA_1/mask/down/res1_1" 1421 | top: "AttentionA_1/mask/skip/res1" 1422 | eltwise_param { 1423 | operation: SUM 1424 | } 1425 | } 1426 | 1427 | layer{ 1428 | name: "AttentionA_1/mask/skip/res1/relu" 1429 | type: "ReLU" 1430 | bottom: "AttentionA_1/mask/skip/res1" 1431 | top: "AttentionA_1/mask/skip/res1" 1432 | } 1433 | 1434 | layer{ 1435 | name: "AttentionA_1/mask/up1" 1436 | type: "Eltwise" 1437 | bottom: "AttentionA_1/mask/skip/res1" 1438 | bottom: "AttentionA_1/mask/up/interp_2" 1439 | top: "AttentionA_1/mask/up1" 1440 | eltwise_param { 1441 | operation: SUM 1442 | } 1443 | } 1444 | 1445 | layer{ 1446 | name: "AttentionA_1/mask/up/res1_1/branch1/conv1_1x1" 1447 | type: "Convolution" 1448 | bottom: "AttentionA_1/mask/up1" 1449 | top: "AttentionA_1/mask/up/res1_1/branch1/conv1_1x1" 1450 | param{ 1451 | lr_mult: 1 1452 | decay_mult: 1 1453 | } 1454 | convolution_param { 1455 | num_output: 128 1456 | pad: 0 1457 | kernel_size: 1 1458 | stride: 1 1459 | bias_term: false 1460 | } 1461 | } 1462 | 1463 | layer{ 1464 | name: "AttentionA_1/mask/up/res1_1/branch1/conv1_1x1/bn" 1465 | type: "BN" 1466 | bottom: "AttentionA_1/mask/up/res1_1/branch1/conv1_1x1" 1467 | top: "AttentionA_1/mask/up/res1_1/branch1/conv1_1x1/bn" 1468 | bn_param { 1469 | frozen: true 1470 | } 1471 | } 1472 | 1473 | layer{ 1474 | name: "AttentionA_1/mask/up/res1_1/branch1/conv1_1x1/bn/relu" 1475 | type: "ReLU" 1476 | bottom: "AttentionA_1/mask/up/res1_1/branch1/conv1_1x1/bn" 1477 | top: "AttentionA_1/mask/up/res1_1/branch1/conv1_1x1/bn" 1478 | } 1479 | 1480 | layer{ 1481 | name: "AttentionA_1/mask/up/res1_1/branch1/conv2_3x3" 1482 | type: "Convolution" 1483 | bottom: "AttentionA_1/mask/up/res1_1/branch1/conv1_1x1/bn" 1484 | top: "AttentionA_1/mask/up/res1_1/branch1/conv2_3x3" 1485 | param{ 1486 | lr_mult: 1 1487 | decay_mult: 1 1488 | } 1489 | convolution_param { 1490 | num_output: 128 1491 | pad: 1 1492 | kernel_size: 3 1493 | stride: 1 1494 | group: 32 1495 | bias_term: false 1496 | } 1497 | } 1498 | 1499 | layer{ 1500 | name: "AttentionA_1/mask/up/res1_1/branch1/conv2_3x3/bn" 1501 | type: "BN" 1502 | bottom: 
"AttentionA_1/mask/up/res1_1/branch1/conv2_3x3" 1503 | top: "AttentionA_1/mask/up/res1_1/branch1/conv2_3x3/bn" 1504 | bn_param { 1505 | frozen: true 1506 | } 1507 | } 1508 | 1509 | layer{ 1510 | name: "AttentionA_1/mask/up/res1_1/branch1/conv2_3x3/bn/relu" 1511 | type: "ReLU" 1512 | bottom: "AttentionA_1/mask/up/res1_1/branch1/conv2_3x3/bn" 1513 | top: "AttentionA_1/mask/up/res1_1/branch1/conv2_3x3/bn" 1514 | } 1515 | 1516 | layer{ 1517 | name: "AttentionA_1/mask/up/res1_1/branch1/conv3_1x1" 1518 | type: "Convolution" 1519 | bottom: "AttentionA_1/mask/up/res1_1/branch1/conv2_3x3/bn" 1520 | top: "AttentionA_1/mask/up/res1_1/branch1/conv3_1x1" 1521 | param{ 1522 | lr_mult: 1 1523 | decay_mult: 1 1524 | } 1525 | convolution_param { 1526 | num_output: 256 1527 | pad: 0 1528 | kernel_size: 1 1529 | stride: 1 1530 | bias_term: false 1531 | } 1532 | } 1533 | 1534 | layer{ 1535 | name: "AttentionA_1/mask/up/res1_1/branch1/conv3_1x1/bn" 1536 | type: "BN" 1537 | bottom: "AttentionA_1/mask/up/res1_1/branch1/conv3_1x1" 1538 | top: "AttentionA_1/mask/up/res1_1/branch1/conv3_1x1/bn" 1539 | bn_param { 1540 | frozen: true 1541 | } 1542 | } 1543 | 1544 | layer{ 1545 | name: "AttentionA_1/mask/up/res1_1" 1546 | type: "Eltwise" 1547 | bottom: "AttentionA_1/mask/up/res1_1/branch1/conv3_1x1/bn" 1548 | bottom: "AttentionA_1/mask/up1" 1549 | top: "AttentionA_1/mask/up/res1_1" 1550 | eltwise_param { 1551 | operation: SUM 1552 | } 1553 | } 1554 | 1555 | layer{ 1556 | name: "AttentionA_1/mask/up/res1_1/relu" 1557 | type: "ReLU" 1558 | bottom: "AttentionA_1/mask/up/res1_1" 1559 | top: "AttentionA_1/mask/up/res1_1" 1560 | } 1561 | 1562 | layer{ 1563 | name: "AttentionA_1/mask/up/interp_1" 1564 | type: "Interp" 1565 | bottom: "AttentionA_1/mask/up/res1_1" 1566 | bottom: "AttentionA_1/trunk/res3" 1567 | top: "AttentionA_1/mask/up/interp_1" 1568 | } 1569 | 1570 | layer{ 1571 | name: "AttentionA_1/mask/linear_1" 1572 | type: "Convolution" 1573 | bottom: "AttentionA_1/mask/up/interp_1" 1574 | top: "AttentionA_1/mask/linear_1" 1575 | param{ 1576 | lr_mult: 1 1577 | decay_mult: 1 1578 | } 1579 | convolution_param { 1580 | num_output: 256 1581 | pad: 0 1582 | kernel_size: 1 1583 | stride: 1 1584 | bias_term: false 1585 | } 1586 | } 1587 | 1588 | layer{ 1589 | name: "AttentionA_1/mask/linear_1/bn" 1590 | type: "BN" 1591 | bottom: "AttentionA_1/mask/linear_1" 1592 | top: "AttentionA_1/mask/linear_1/bn" 1593 | bn_param { 1594 | frozen: true 1595 | } 1596 | } 1597 | 1598 | layer{ 1599 | name: "AttentionA_1/mask/linear_1/bn/relu" 1600 | type: "ReLU" 1601 | bottom: "AttentionA_1/mask/linear_1/bn" 1602 | top: "AttentionA_1/mask/linear_1/bn" 1603 | } 1604 | 1605 | layer{ 1606 | name: "AttentionA_1/mask/linear_2" 1607 | type: "Convolution" 1608 | bottom: "AttentionA_1/mask/linear_1/bn" 1609 | top: "AttentionA_1/mask/linear_2" 1610 | param{ 1611 | lr_mult: 1 1612 | decay_mult: 1 1613 | } 1614 | convolution_param { 1615 | num_output: 256 1616 | pad: 0 1617 | kernel_size: 1 1618 | stride: 1 1619 | bias_term: false 1620 | } 1621 | } 1622 | 1623 | layer{ 1624 | name: "AttentionA_1/mask" 1625 | type: "Sigmoid" 1626 | bottom: "AttentionA_1/mask/linear_2" 1627 | top: "AttentionA_1/mask" 1628 | } 1629 | 1630 | layer{ 1631 | name: "AttentionA_1/attention_residual" 1632 | type: "Eltwise" 1633 | bottom: "AttentionA_1/trunk/res3" 1634 | bottom: "AttentionA_1/mask" 1635 | top: "AttentionA_1/attention_residual" 1636 | eltwise_param { 1637 | operation: PROD 1638 | } 1639 | } 1640 | 1641 | layer{ 1642 | name: "AttentionA_1/fusion" 1643 | type: 
"Eltwise" 1644 | bottom: "AttentionA_1/attention_residual" 1645 | bottom: "AttentionA_1/trunk/res3" 1646 | top: "AttentionA_1/fusion" 1647 | eltwise_param { 1648 | operation: SUM 1649 | } 1650 | } 1651 | 1652 | layer{ 1653 | name: "AttentionA_1/branch1/conv1_1x1" 1654 | type: "Convolution" 1655 | bottom: "AttentionA_1/fusion" 1656 | top: "AttentionA_1/branch1/conv1_1x1" 1657 | param{ 1658 | lr_mult: 1 1659 | decay_mult: 1 1660 | } 1661 | convolution_param { 1662 | num_output: 128 1663 | pad: 0 1664 | kernel_size: 1 1665 | stride: 1 1666 | bias_term: false 1667 | } 1668 | } 1669 | 1670 | layer{ 1671 | name: "AttentionA_1/branch1/conv1_1x1/bn" 1672 | type: "BN" 1673 | bottom: "AttentionA_1/branch1/conv1_1x1" 1674 | top: "AttentionA_1/branch1/conv1_1x1/bn" 1675 | bn_param { 1676 | frozen: true 1677 | } 1678 | } 1679 | 1680 | layer{ 1681 | name: "AttentionA_1/branch1/conv1_1x1/bn/relu" 1682 | type: "ReLU" 1683 | bottom: "AttentionA_1/branch1/conv1_1x1/bn" 1684 | top: "AttentionA_1/branch1/conv1_1x1/bn" 1685 | } 1686 | 1687 | layer{ 1688 | name: "AttentionA_1/branch1/conv2_3x3" 1689 | type: "Convolution" 1690 | bottom: "AttentionA_1/branch1/conv1_1x1/bn" 1691 | top: "AttentionA_1/branch1/conv2_3x3" 1692 | param{ 1693 | lr_mult: 1 1694 | decay_mult: 1 1695 | } 1696 | convolution_param { 1697 | num_output: 128 1698 | pad: 1 1699 | kernel_size: 3 1700 | stride: 1 1701 | group: 32 1702 | bias_term: false 1703 | } 1704 | } 1705 | 1706 | layer{ 1707 | name: "AttentionA_1/branch1/conv2_3x3/bn" 1708 | type: "BN" 1709 | bottom: "AttentionA_1/branch1/conv2_3x3" 1710 | top: "AttentionA_1/branch1/conv2_3x3/bn" 1711 | bn_param { 1712 | frozen: true 1713 | } 1714 | } 1715 | 1716 | layer{ 1717 | name: "AttentionA_1/branch1/conv2_3x3/bn/relu" 1718 | type: "ReLU" 1719 | bottom: "AttentionA_1/branch1/conv2_3x3/bn" 1720 | top: "AttentionA_1/branch1/conv2_3x3/bn" 1721 | } 1722 | 1723 | layer{ 1724 | name: "AttentionA_1/branch1/conv3_1x1" 1725 | type: "Convolution" 1726 | bottom: "AttentionA_1/branch1/conv2_3x3/bn" 1727 | top: "AttentionA_1/branch1/conv3_1x1" 1728 | param{ 1729 | lr_mult: 1 1730 | decay_mult: 1 1731 | } 1732 | convolution_param { 1733 | num_output: 256 1734 | pad: 0 1735 | kernel_size: 1 1736 | stride: 1 1737 | bias_term: false 1738 | } 1739 | } 1740 | 1741 | layer{ 1742 | name: "AttentionA_1/branch1/conv3_1x1/bn" 1743 | type: "BN" 1744 | bottom: "AttentionA_1/branch1/conv3_1x1" 1745 | top: "AttentionA_1/branch1/conv3_1x1/bn" 1746 | bn_param { 1747 | frozen: true 1748 | } 1749 | } 1750 | 1751 | layer{ 1752 | name: "AttentionA_1" 1753 | type: "Eltwise" 1754 | bottom: "AttentionA_1/branch1/conv3_1x1/bn" 1755 | bottom: "AttentionA_1/fusion" 1756 | top: "AttentionA_1" 1757 | eltwise_param { 1758 | operation: SUM 1759 | } 1760 | } 1761 | 1762 | layer{ 1763 | name: "AttentionA_1/relu" 1764 | type: "ReLU" 1765 | bottom: "AttentionA_1" 1766 | top: "AttentionA_1" 1767 | } 1768 | 1769 | layer{ 1770 | name: "pre_res_2/branch1/conv1_1x1" 1771 | type: "Convolution" 1772 | bottom: "AttentionA_1" 1773 | top: "pre_res_2/branch1/conv1_1x1" 1774 | param{ 1775 | lr_mult: 1 1776 | decay_mult: 1 1777 | } 1778 | convolution_param { 1779 | num_output: 256 1780 | pad: 0 1781 | kernel_size: 1 1782 | stride: 1 1783 | bias_term: false 1784 | } 1785 | } 1786 | 1787 | layer{ 1788 | name: "pre_res_2/branch1/conv1_1x1/bn" 1789 | type: "BN" 1790 | bottom: "pre_res_2/branch1/conv1_1x1" 1791 | top: "pre_res_2/branch1/conv1_1x1/bn" 1792 | bn_param { 1793 | frozen: true 1794 | } 1795 | } 1796 | 1797 | layer{ 1798 | name: 
"pre_res_2/branch1/conv1_1x1/bn/relu" 1799 | type: "ReLU" 1800 | bottom: "pre_res_2/branch1/conv1_1x1/bn" 1801 | top: "pre_res_2/branch1/conv1_1x1/bn" 1802 | } 1803 | 1804 | layer{ 1805 | name: "pre_res_2/branch1/conv2_3x3" 1806 | type: "Convolution" 1807 | bottom: "pre_res_2/branch1/conv1_1x1/bn" 1808 | top: "pre_res_2/branch1/conv2_3x3" 1809 | param{ 1810 | lr_mult: 1 1811 | decay_mult: 1 1812 | } 1813 | convolution_param { 1814 | num_output: 256 1815 | pad: 1 1816 | kernel_size: 3 1817 | stride: 2 1818 | group: 32 1819 | bias_term: false 1820 | } 1821 | } 1822 | 1823 | layer{ 1824 | name: "pre_res_2/branch1/conv2_3x3/bn" 1825 | type: "BN" 1826 | bottom: "pre_res_2/branch1/conv2_3x3" 1827 | top: "pre_res_2/branch1/conv2_3x3/bn" 1828 | bn_param { 1829 | frozen: true 1830 | } 1831 | } 1832 | 1833 | layer{ 1834 | name: "pre_res_2/branch1/conv2_3x3/bn/relu" 1835 | type: "ReLU" 1836 | bottom: "pre_res_2/branch1/conv2_3x3/bn" 1837 | top: "pre_res_2/branch1/conv2_3x3/bn" 1838 | } 1839 | 1840 | layer{ 1841 | name: "pre_res_2/branch1/conv3_1x1" 1842 | type: "Convolution" 1843 | bottom: "pre_res_2/branch1/conv2_3x3/bn" 1844 | top: "pre_res_2/branch1/conv3_1x1" 1845 | param{ 1846 | lr_mult: 1 1847 | decay_mult: 1 1848 | } 1849 | convolution_param { 1850 | num_output: 512 1851 | pad: 0 1852 | kernel_size: 1 1853 | stride: 1 1854 | bias_term: false 1855 | } 1856 | } 1857 | 1858 | layer{ 1859 | name: "pre_res_2/branch1/conv3_1x1/bn" 1860 | type: "BN" 1861 | bottom: "pre_res_2/branch1/conv3_1x1" 1862 | top: "pre_res_2/branch1/conv3_1x1/bn" 1863 | bn_param { 1864 | frozen: true 1865 | } 1866 | } 1867 | 1868 | layer{ 1869 | name: "pre_res_2/branch2/conv1_1x1" 1870 | type: "Convolution" 1871 | bottom: "AttentionA_1" 1872 | top: "pre_res_2/branch2/conv1_1x1" 1873 | param{ 1874 | lr_mult: 1 1875 | decay_mult: 1 1876 | } 1877 | convolution_param { 1878 | num_output: 512 1879 | pad: 0 1880 | kernel_size: 1 1881 | stride: 2 1882 | bias_term: false 1883 | } 1884 | } 1885 | 1886 | layer{ 1887 | name: "pre_res_2/branch2/conv1_1x1/bn" 1888 | type: "BN" 1889 | bottom: "pre_res_2/branch2/conv1_1x1" 1890 | top: "pre_res_2/branch2/conv1_1x1/bn" 1891 | bn_param { 1892 | frozen: true 1893 | } 1894 | } 1895 | 1896 | layer{ 1897 | name: "pre_res_2" 1898 | type: "Eltwise" 1899 | bottom: "pre_res_2/branch2/conv1_1x1/bn" 1900 | bottom: "pre_res_2/branch1/conv3_1x1/bn" 1901 | top: "pre_res_2" 1902 | eltwise_param { 1903 | operation: SUM 1904 | } 1905 | } 1906 | 1907 | layer{ 1908 | name: "pre_res_2/relu" 1909 | type: "ReLU" 1910 | bottom: "pre_res_2" 1911 | top: "pre_res_2" 1912 | } 1913 | 1914 | layer{ 1915 | name: "AttentionB_1/trunk/res1/branch1/conv1_1x1" 1916 | type: "Convolution" 1917 | bottom: "pre_res_2" 1918 | top: "AttentionB_1/trunk/res1/branch1/conv1_1x1" 1919 | param{ 1920 | lr_mult: 1 1921 | decay_mult: 1 1922 | } 1923 | convolution_param { 1924 | num_output: 256 1925 | pad: 0 1926 | kernel_size: 1 1927 | stride: 1 1928 | bias_term: false 1929 | } 1930 | } 1931 | 1932 | layer{ 1933 | name: "AttentionB_1/trunk/res1/branch1/conv1_1x1/bn" 1934 | type: "BN" 1935 | bottom: "AttentionB_1/trunk/res1/branch1/conv1_1x1" 1936 | top: "AttentionB_1/trunk/res1/branch1/conv1_1x1/bn" 1937 | bn_param { 1938 | frozen: true 1939 | } 1940 | } 1941 | 1942 | layer{ 1943 | name: "AttentionB_1/trunk/res1/branch1/conv1_1x1/bn/relu" 1944 | type: "ReLU" 1945 | bottom: "AttentionB_1/trunk/res1/branch1/conv1_1x1/bn" 1946 | top: "AttentionB_1/trunk/res1/branch1/conv1_1x1/bn" 1947 | } 1948 | 1949 | layer{ 1950 | name: 
"AttentionB_1/trunk/res1/branch1/conv2_3x3" 1951 | type: "Convolution" 1952 | bottom: "AttentionB_1/trunk/res1/branch1/conv1_1x1/bn" 1953 | top: "AttentionB_1/trunk/res1/branch1/conv2_3x3" 1954 | param{ 1955 | lr_mult: 1 1956 | decay_mult: 1 1957 | } 1958 | convolution_param { 1959 | num_output: 256 1960 | pad: 1 1961 | kernel_size: 3 1962 | stride: 1 1963 | group: 32 1964 | bias_term: false 1965 | } 1966 | } 1967 | 1968 | layer{ 1969 | name: "AttentionB_1/trunk/res1/branch1/conv2_3x3/bn" 1970 | type: "BN" 1971 | bottom: "AttentionB_1/trunk/res1/branch1/conv2_3x3" 1972 | top: "AttentionB_1/trunk/res1/branch1/conv2_3x3/bn" 1973 | bn_param { 1974 | frozen: true 1975 | } 1976 | } 1977 | 1978 | layer{ 1979 | name: "AttentionB_1/trunk/res1/branch1/conv2_3x3/bn/relu" 1980 | type: "ReLU" 1981 | bottom: "AttentionB_1/trunk/res1/branch1/conv2_3x3/bn" 1982 | top: "AttentionB_1/trunk/res1/branch1/conv2_3x3/bn" 1983 | } 1984 | 1985 | layer{ 1986 | name: "AttentionB_1/trunk/res1/branch1/conv3_1x1" 1987 | type: "Convolution" 1988 | bottom: "AttentionB_1/trunk/res1/branch1/conv2_3x3/bn" 1989 | top: "AttentionB_1/trunk/res1/branch1/conv3_1x1" 1990 | param{ 1991 | lr_mult: 1 1992 | decay_mult: 1 1993 | } 1994 | convolution_param { 1995 | num_output: 512 1996 | pad: 0 1997 | kernel_size: 1 1998 | stride: 1 1999 | bias_term: false 2000 | } 2001 | } 2002 | 2003 | layer{ 2004 | name: "AttentionB_1/trunk/res1/branch1/conv3_1x1/bn" 2005 | type: "BN" 2006 | bottom: "AttentionB_1/trunk/res1/branch1/conv3_1x1" 2007 | top: "AttentionB_1/trunk/res1/branch1/conv3_1x1/bn" 2008 | bn_param { 2009 | frozen: true 2010 | } 2011 | } 2012 | 2013 | layer{ 2014 | name: "AttentionB_1/trunk/res1" 2015 | type: "Eltwise" 2016 | bottom: "AttentionB_1/trunk/res1/branch1/conv3_1x1/bn" 2017 | bottom: "pre_res_2" 2018 | top: "AttentionB_1/trunk/res1" 2019 | eltwise_param { 2020 | operation: SUM 2021 | } 2022 | } 2023 | 2024 | layer{ 2025 | name: "AttentionB_1/trunk/res1/relu" 2026 | type: "ReLU" 2027 | bottom: "AttentionB_1/trunk/res1" 2028 | top: "AttentionB_1/trunk/res1" 2029 | } 2030 | 2031 | layer{ 2032 | name: "AttentionB_1/trunk/res2/branch1/conv1_1x1" 2033 | type: "Convolution" 2034 | bottom: "AttentionB_1/trunk/res1" 2035 | top: "AttentionB_1/trunk/res2/branch1/conv1_1x1" 2036 | param{ 2037 | lr_mult: 1 2038 | decay_mult: 1 2039 | } 2040 | convolution_param { 2041 | num_output: 256 2042 | pad: 0 2043 | kernel_size: 1 2044 | stride: 1 2045 | bias_term: false 2046 | } 2047 | } 2048 | 2049 | layer{ 2050 | name: "AttentionB_1/trunk/res2/branch1/conv1_1x1/bn" 2051 | type: "BN" 2052 | bottom: "AttentionB_1/trunk/res2/branch1/conv1_1x1" 2053 | top: "AttentionB_1/trunk/res2/branch1/conv1_1x1/bn" 2054 | bn_param { 2055 | frozen: true 2056 | } 2057 | } 2058 | 2059 | layer{ 2060 | name: "AttentionB_1/trunk/res2/branch1/conv1_1x1/bn/relu" 2061 | type: "ReLU" 2062 | bottom: "AttentionB_1/trunk/res2/branch1/conv1_1x1/bn" 2063 | top: "AttentionB_1/trunk/res2/branch1/conv1_1x1/bn" 2064 | } 2065 | 2066 | layer{ 2067 | name: "AttentionB_1/trunk/res2/branch1/conv2_3x3" 2068 | type: "Convolution" 2069 | bottom: "AttentionB_1/trunk/res2/branch1/conv1_1x1/bn" 2070 | top: "AttentionB_1/trunk/res2/branch1/conv2_3x3" 2071 | param{ 2072 | lr_mult: 1 2073 | decay_mult: 1 2074 | } 2075 | convolution_param { 2076 | num_output: 256 2077 | pad: 1 2078 | kernel_size: 3 2079 | stride: 1 2080 | group: 32 2081 | bias_term: false 2082 | } 2083 | } 2084 | 2085 | layer{ 2086 | name: "AttentionB_1/trunk/res2/branch1/conv2_3x3/bn" 2087 | type: "BN" 2088 | bottom: 
"AttentionB_1/trunk/res2/branch1/conv2_3x3" 2089 | top: "AttentionB_1/trunk/res2/branch1/conv2_3x3/bn" 2090 | bn_param { 2091 | frozen: true 2092 | } 2093 | } 2094 | 2095 | layer{ 2096 | name: "AttentionB_1/trunk/res2/branch1/conv2_3x3/bn/relu" 2097 | type: "ReLU" 2098 | bottom: "AttentionB_1/trunk/res2/branch1/conv2_3x3/bn" 2099 | top: "AttentionB_1/trunk/res2/branch1/conv2_3x3/bn" 2100 | } 2101 | 2102 | layer{ 2103 | name: "AttentionB_1/trunk/res2/branch1/conv3_1x1" 2104 | type: "Convolution" 2105 | bottom: "AttentionB_1/trunk/res2/branch1/conv2_3x3/bn" 2106 | top: "AttentionB_1/trunk/res2/branch1/conv3_1x1" 2107 | param{ 2108 | lr_mult: 1 2109 | decay_mult: 1 2110 | } 2111 | convolution_param { 2112 | num_output: 512 2113 | pad: 0 2114 | kernel_size: 1 2115 | stride: 1 2116 | bias_term: false 2117 | } 2118 | } 2119 | 2120 | layer{ 2121 | name: "AttentionB_1/trunk/res2/branch1/conv3_1x1/bn" 2122 | type: "BN" 2123 | bottom: "AttentionB_1/trunk/res2/branch1/conv3_1x1" 2124 | top: "AttentionB_1/trunk/res2/branch1/conv3_1x1/bn" 2125 | bn_param { 2126 | frozen: true 2127 | } 2128 | } 2129 | 2130 | layer{ 2131 | name: "AttentionB_1/trunk/res2" 2132 | type: "Eltwise" 2133 | bottom: "AttentionB_1/trunk/res2/branch1/conv3_1x1/bn" 2134 | bottom: "AttentionB_1/trunk/res1" 2135 | top: "AttentionB_1/trunk/res2" 2136 | eltwise_param { 2137 | operation: SUM 2138 | } 2139 | } 2140 | 2141 | layer{ 2142 | name: "AttentionB_1/trunk/res2/relu" 2143 | type: "ReLU" 2144 | bottom: "AttentionB_1/trunk/res2" 2145 | top: "AttentionB_1/trunk/res2" 2146 | } 2147 | 2148 | layer{ 2149 | name: "AttentionB_1/trunk/res3/branch1/conv1_1x1" 2150 | type: "Convolution" 2151 | bottom: "AttentionB_1/trunk/res2" 2152 | top: "AttentionB_1/trunk/res3/branch1/conv1_1x1" 2153 | param{ 2154 | lr_mult: 1 2155 | decay_mult: 1 2156 | } 2157 | convolution_param { 2158 | num_output: 256 2159 | pad: 0 2160 | kernel_size: 1 2161 | stride: 1 2162 | bias_term: false 2163 | } 2164 | } 2165 | 2166 | layer{ 2167 | name: "AttentionB_1/trunk/res3/branch1/conv1_1x1/bn" 2168 | type: "BN" 2169 | bottom: "AttentionB_1/trunk/res3/branch1/conv1_1x1" 2170 | top: "AttentionB_1/trunk/res3/branch1/conv1_1x1/bn" 2171 | bn_param { 2172 | frozen: true 2173 | } 2174 | } 2175 | 2176 | layer{ 2177 | name: "AttentionB_1/trunk/res3/branch1/conv1_1x1/bn/relu" 2178 | type: "ReLU" 2179 | bottom: "AttentionB_1/trunk/res3/branch1/conv1_1x1/bn" 2180 | top: "AttentionB_1/trunk/res3/branch1/conv1_1x1/bn" 2181 | } 2182 | 2183 | layer{ 2184 | name: "AttentionB_1/trunk/res3/branch1/conv2_3x3" 2185 | type: "Convolution" 2186 | bottom: "AttentionB_1/trunk/res3/branch1/conv1_1x1/bn" 2187 | top: "AttentionB_1/trunk/res3/branch1/conv2_3x3" 2188 | param{ 2189 | lr_mult: 1 2190 | decay_mult: 1 2191 | } 2192 | convolution_param { 2193 | num_output: 256 2194 | pad: 1 2195 | kernel_size: 3 2196 | stride: 1 2197 | group: 32 2198 | bias_term: false 2199 | } 2200 | } 2201 | 2202 | layer{ 2203 | name: "AttentionB_1/trunk/res3/branch1/conv2_3x3/bn" 2204 | type: "BN" 2205 | bottom: "AttentionB_1/trunk/res3/branch1/conv2_3x3" 2206 | top: "AttentionB_1/trunk/res3/branch1/conv2_3x3/bn" 2207 | bn_param { 2208 | frozen: true 2209 | } 2210 | } 2211 | 2212 | layer{ 2213 | name: "AttentionB_1/trunk/res3/branch1/conv2_3x3/bn/relu" 2214 | type: "ReLU" 2215 | bottom: "AttentionB_1/trunk/res3/branch1/conv2_3x3/bn" 2216 | top: "AttentionB_1/trunk/res3/branch1/conv2_3x3/bn" 2217 | } 2218 | 2219 | layer{ 2220 | name: "AttentionB_1/trunk/res3/branch1/conv3_1x1" 2221 | type: "Convolution" 2222 | bottom: 
"AttentionB_1/trunk/res3/branch1/conv2_3x3/bn" 2223 | top: "AttentionB_1/trunk/res3/branch1/conv3_1x1" 2224 | param{ 2225 | lr_mult: 1 2226 | decay_mult: 1 2227 | } 2228 | convolution_param { 2229 | num_output: 512 2230 | pad: 0 2231 | kernel_size: 1 2232 | stride: 1 2233 | bias_term: false 2234 | } 2235 | } 2236 | 2237 | layer{ 2238 | name: "AttentionB_1/trunk/res3/branch1/conv3_1x1/bn" 2239 | type: "BN" 2240 | bottom: "AttentionB_1/trunk/res3/branch1/conv3_1x1" 2241 | top: "AttentionB_1/trunk/res3/branch1/conv3_1x1/bn" 2242 | bn_param { 2243 | frozen: true 2244 | } 2245 | } 2246 | 2247 | layer{ 2248 | name: "AttentionB_1/trunk/res3" 2249 | type: "Eltwise" 2250 | bottom: "AttentionB_1/trunk/res3/branch1/conv3_1x1/bn" 2251 | bottom: "AttentionB_1/trunk/res2" 2252 | top: "AttentionB_1/trunk/res3" 2253 | eltwise_param { 2254 | operation: SUM 2255 | } 2256 | } 2257 | 2258 | layer{ 2259 | name: "AttentionB_1/trunk/res3/relu" 2260 | type: "ReLU" 2261 | bottom: "AttentionB_1/trunk/res3" 2262 | top: "AttentionB_1/trunk/res3" 2263 | } 2264 | 2265 | layer{ 2266 | name: "AttentionB_1/mask/down/pool1_3x3_s2" 2267 | type: "Pooling" 2268 | bottom: "AttentionB_1/trunk/res1" 2269 | top: "AttentionB_1/mask/down/pool1_3x3_s2" 2270 | pooling_param { 2271 | pool: MAX 2272 | kernel_size: 3 2273 | stride: 2 2274 | } 2275 | } 2276 | 2277 | layer{ 2278 | name: "AttentionB_1/mask/down/res1_1/branch1/conv1_1x1" 2279 | type: "Convolution" 2280 | bottom: "AttentionB_1/mask/down/pool1_3x3_s2" 2281 | top: "AttentionB_1/mask/down/res1_1/branch1/conv1_1x1" 2282 | param{ 2283 | lr_mult: 1 2284 | decay_mult: 1 2285 | } 2286 | convolution_param { 2287 | num_output: 256 2288 | pad: 0 2289 | kernel_size: 1 2290 | stride: 1 2291 | bias_term: false 2292 | } 2293 | } 2294 | 2295 | layer{ 2296 | name: "AttentionB_1/mask/down/res1_1/branch1/conv1_1x1/bn" 2297 | type: "BN" 2298 | bottom: "AttentionB_1/mask/down/res1_1/branch1/conv1_1x1" 2299 | top: "AttentionB_1/mask/down/res1_1/branch1/conv1_1x1/bn" 2300 | bn_param { 2301 | frozen: true 2302 | } 2303 | } 2304 | 2305 | layer{ 2306 | name: "AttentionB_1/mask/down/res1_1/branch1/conv1_1x1/bn/relu" 2307 | type: "ReLU" 2308 | bottom: "AttentionB_1/mask/down/res1_1/branch1/conv1_1x1/bn" 2309 | top: "AttentionB_1/mask/down/res1_1/branch1/conv1_1x1/bn" 2310 | } 2311 | 2312 | layer{ 2313 | name: "AttentionB_1/mask/down/res1_1/branch1/conv2_3x3" 2314 | type: "Convolution" 2315 | bottom: "AttentionB_1/mask/down/res1_1/branch1/conv1_1x1/bn" 2316 | top: "AttentionB_1/mask/down/res1_1/branch1/conv2_3x3" 2317 | param{ 2318 | lr_mult: 1 2319 | decay_mult: 1 2320 | } 2321 | convolution_param { 2322 | num_output: 256 2323 | pad: 1 2324 | kernel_size: 3 2325 | stride: 1 2326 | group: 32 2327 | bias_term: false 2328 | } 2329 | } 2330 | 2331 | layer{ 2332 | name: "AttentionB_1/mask/down/res1_1/branch1/conv2_3x3/bn" 2333 | type: "BN" 2334 | bottom: "AttentionB_1/mask/down/res1_1/branch1/conv2_3x3" 2335 | top: "AttentionB_1/mask/down/res1_1/branch1/conv2_3x3/bn" 2336 | bn_param { 2337 | frozen: true 2338 | } 2339 | } 2340 | 2341 | layer{ 2342 | name: "AttentionB_1/mask/down/res1_1/branch1/conv2_3x3/bn/relu" 2343 | type: "ReLU" 2344 | bottom: "AttentionB_1/mask/down/res1_1/branch1/conv2_3x3/bn" 2345 | top: "AttentionB_1/mask/down/res1_1/branch1/conv2_3x3/bn" 2346 | } 2347 | 2348 | layer{ 2349 | name: "AttentionB_1/mask/down/res1_1/branch1/conv3_1x1" 2350 | type: "Convolution" 2351 | bottom: "AttentionB_1/mask/down/res1_1/branch1/conv2_3x3/bn" 2352 | top: "AttentionB_1/mask/down/res1_1/branch1/conv3_1x1" 
2353 | param{ 2354 | lr_mult: 1 2355 | decay_mult: 1 2356 | } 2357 | convolution_param { 2358 | num_output: 512 2359 | pad: 0 2360 | kernel_size: 1 2361 | stride: 1 2362 | bias_term: false 2363 | } 2364 | } 2365 | 2366 | layer{ 2367 | name: "AttentionB_1/mask/down/res1_1/branch1/conv3_1x1/bn" 2368 | type: "BN" 2369 | bottom: "AttentionB_1/mask/down/res1_1/branch1/conv3_1x1" 2370 | top: "AttentionB_1/mask/down/res1_1/branch1/conv3_1x1/bn" 2371 | bn_param { 2372 | frozen: true 2373 | } 2374 | } 2375 | 2376 | layer{ 2377 | name: "AttentionB_1/mask/down/res1_1" 2378 | type: "Eltwise" 2379 | bottom: "AttentionB_1/mask/down/res1_1/branch1/conv3_1x1/bn" 2380 | bottom: "AttentionB_1/mask/down/pool1_3x3_s2" 2381 | top: "AttentionB_1/mask/down/res1_1" 2382 | eltwise_param { 2383 | operation: SUM 2384 | } 2385 | } 2386 | 2387 | layer{ 2388 | name: "AttentionB_1/mask/down/res1_1/relu" 2389 | type: "ReLU" 2390 | bottom: "AttentionB_1/mask/down/res1_1" 2391 | top: "AttentionB_1/mask/down/res1_1" 2392 | } 2393 | 2394 | layer{ 2395 | name: "AttentionB_1/mask/down/pool2_3x3_s2" 2396 | type: "Pooling" 2397 | bottom: "AttentionB_1/mask/down/res1_1" 2398 | top: "AttentionB_1/mask/down/pool2_3x3_s2" 2399 | pooling_param { 2400 | pool: MAX 2401 | kernel_size: 3 2402 | stride: 2 2403 | } 2404 | } 2405 | 2406 | layer{ 2407 | name: "AttentionB_1/mask/down/res2_1/branch1/conv1_1x1" 2408 | type: "Convolution" 2409 | bottom: "AttentionB_1/mask/down/pool2_3x3_s2" 2410 | top: "AttentionB_1/mask/down/res2_1/branch1/conv1_1x1" 2411 | param{ 2412 | lr_mult: 1 2413 | decay_mult: 1 2414 | } 2415 | convolution_param { 2416 | num_output: 256 2417 | pad: 0 2418 | kernel_size: 1 2419 | stride: 1 2420 | bias_term: false 2421 | } 2422 | } 2423 | 2424 | layer{ 2425 | name: "AttentionB_1/mask/down/res2_1/branch1/conv1_1x1/bn" 2426 | type: "BN" 2427 | bottom: "AttentionB_1/mask/down/res2_1/branch1/conv1_1x1" 2428 | top: "AttentionB_1/mask/down/res2_1/branch1/conv1_1x1/bn" 2429 | bn_param { 2430 | frozen: true 2431 | } 2432 | } 2433 | 2434 | layer{ 2435 | name: "AttentionB_1/mask/down/res2_1/branch1/conv1_1x1/bn/relu" 2436 | type: "ReLU" 2437 | bottom: "AttentionB_1/mask/down/res2_1/branch1/conv1_1x1/bn" 2438 | top: "AttentionB_1/mask/down/res2_1/branch1/conv1_1x1/bn" 2439 | } 2440 | 2441 | layer{ 2442 | name: "AttentionB_1/mask/down/res2_1/branch1/conv2_3x3" 2443 | type: "Convolution" 2444 | bottom: "AttentionB_1/mask/down/res2_1/branch1/conv1_1x1/bn" 2445 | top: "AttentionB_1/mask/down/res2_1/branch1/conv2_3x3" 2446 | param{ 2447 | lr_mult: 1 2448 | decay_mult: 1 2449 | } 2450 | convolution_param { 2451 | num_output: 256 2452 | pad: 1 2453 | kernel_size: 3 2454 | stride: 1 2455 | group: 32 2456 | bias_term: false 2457 | } 2458 | } 2459 | 2460 | layer{ 2461 | name: "AttentionB_1/mask/down/res2_1/branch1/conv2_3x3/bn" 2462 | type: "BN" 2463 | bottom: "AttentionB_1/mask/down/res2_1/branch1/conv2_3x3" 2464 | top: "AttentionB_1/mask/down/res2_1/branch1/conv2_3x3/bn" 2465 | bn_param { 2466 | frozen: true 2467 | } 2468 | } 2469 | 2470 | layer{ 2471 | name: "AttentionB_1/mask/down/res2_1/branch1/conv2_3x3/bn/relu" 2472 | type: "ReLU" 2473 | bottom: "AttentionB_1/mask/down/res2_1/branch1/conv2_3x3/bn" 2474 | top: "AttentionB_1/mask/down/res2_1/branch1/conv2_3x3/bn" 2475 | } 2476 | 2477 | layer{ 2478 | name: "AttentionB_1/mask/down/res2_1/branch1/conv3_1x1" 2479 | type: "Convolution" 2480 | bottom: "AttentionB_1/mask/down/res2_1/branch1/conv2_3x3/bn" 2481 | top: "AttentionB_1/mask/down/res2_1/branch1/conv3_1x1" 2482 | param{ 2483 | lr_mult: 
1 2484 | decay_mult: 1 2485 | } 2486 | convolution_param { 2487 | num_output: 512 2488 | pad: 0 2489 | kernel_size: 1 2490 | stride: 1 2491 | bias_term: false 2492 | } 2493 | } 2494 | 2495 | layer{ 2496 | name: "AttentionB_1/mask/down/res2_1/branch1/conv3_1x1/bn" 2497 | type: "BN" 2498 | bottom: "AttentionB_1/mask/down/res2_1/branch1/conv3_1x1" 2499 | top: "AttentionB_1/mask/down/res2_1/branch1/conv3_1x1/bn" 2500 | bn_param { 2501 | frozen: true 2502 | } 2503 | } 2504 | 2505 | layer{ 2506 | name: "AttentionB_1/mask/down/res2_1" 2507 | type: "Eltwise" 2508 | bottom: "AttentionB_1/mask/down/res2_1/branch1/conv3_1x1/bn" 2509 | bottom: "AttentionB_1/mask/down/pool2_3x3_s2" 2510 | top: "AttentionB_1/mask/down/res2_1" 2511 | eltwise_param { 2512 | operation: SUM 2513 | } 2514 | } 2515 | 2516 | layer{ 2517 | name: "AttentionB_1/mask/down/res2_1/relu" 2518 | type: "ReLU" 2519 | bottom: "AttentionB_1/mask/down/res2_1" 2520 | top: "AttentionB_1/mask/down/res2_1" 2521 | } 2522 | 2523 | layer{ 2524 | name: "AttentionB_1/mask/down/res2_2/branch1/conv1_1x1" 2525 | type: "Convolution" 2526 | bottom: "AttentionB_1/mask/down/res2_1" 2527 | top: "AttentionB_1/mask/down/res2_2/branch1/conv1_1x1" 2528 | param{ 2529 | lr_mult: 1 2530 | decay_mult: 1 2531 | } 2532 | convolution_param { 2533 | num_output: 256 2534 | pad: 0 2535 | kernel_size: 1 2536 | stride: 1 2537 | bias_term: false 2538 | } 2539 | } 2540 | 2541 | layer{ 2542 | name: "AttentionB_1/mask/down/res2_2/branch1/conv1_1x1/bn" 2543 | type: "BN" 2544 | bottom: "AttentionB_1/mask/down/res2_2/branch1/conv1_1x1" 2545 | top: "AttentionB_1/mask/down/res2_2/branch1/conv1_1x1/bn" 2546 | bn_param { 2547 | frozen: true 2548 | } 2549 | } 2550 | 2551 | layer{ 2552 | name: "AttentionB_1/mask/down/res2_2/branch1/conv1_1x1/bn/relu" 2553 | type: "ReLU" 2554 | bottom: "AttentionB_1/mask/down/res2_2/branch1/conv1_1x1/bn" 2555 | top: "AttentionB_1/mask/down/res2_2/branch1/conv1_1x1/bn" 2556 | } 2557 | 2558 | layer{ 2559 | name: "AttentionB_1/mask/down/res2_2/branch1/conv2_3x3" 2560 | type: "Convolution" 2561 | bottom: "AttentionB_1/mask/down/res2_2/branch1/conv1_1x1/bn" 2562 | top: "AttentionB_1/mask/down/res2_2/branch1/conv2_3x3" 2563 | param{ 2564 | lr_mult: 1 2565 | decay_mult: 1 2566 | } 2567 | convolution_param { 2568 | num_output: 256 2569 | pad: 1 2570 | kernel_size: 3 2571 | stride: 1 2572 | group: 32 2573 | bias_term: false 2574 | } 2575 | } 2576 | 2577 | layer{ 2578 | name: "AttentionB_1/mask/down/res2_2/branch1/conv2_3x3/bn" 2579 | type: "BN" 2580 | bottom: "AttentionB_1/mask/down/res2_2/branch1/conv2_3x3" 2581 | top: "AttentionB_1/mask/down/res2_2/branch1/conv2_3x3/bn" 2582 | bn_param { 2583 | frozen: true 2584 | } 2585 | } 2586 | 2587 | layer{ 2588 | name: "AttentionB_1/mask/down/res2_2/branch1/conv2_3x3/bn/relu" 2589 | type: "ReLU" 2590 | bottom: "AttentionB_1/mask/down/res2_2/branch1/conv2_3x3/bn" 2591 | top: "AttentionB_1/mask/down/res2_2/branch1/conv2_3x3/bn" 2592 | } 2593 | 2594 | layer{ 2595 | name: "AttentionB_1/mask/down/res2_2/branch1/conv3_1x1" 2596 | type: "Convolution" 2597 | bottom: "AttentionB_1/mask/down/res2_2/branch1/conv2_3x3/bn" 2598 | top: "AttentionB_1/mask/down/res2_2/branch1/conv3_1x1" 2599 | param{ 2600 | lr_mult: 1 2601 | decay_mult: 1 2602 | } 2603 | convolution_param { 2604 | num_output: 512 2605 | pad: 0 2606 | kernel_size: 1 2607 | stride: 1 2608 | bias_term: false 2609 | } 2610 | } 2611 | 2612 | layer{ 2613 | name: "AttentionB_1/mask/down/res2_2/branch1/conv3_1x1/bn" 2614 | type: "BN" 2615 | bottom: 
"AttentionB_1/mask/down/res2_2/branch1/conv3_1x1" 2616 | top: "AttentionB_1/mask/down/res2_2/branch1/conv3_1x1/bn" 2617 | bn_param { 2618 | frozen: true 2619 | } 2620 | } 2621 | 2622 | layer{ 2623 | name: "AttentionB_1/mask/down/res2_2" 2624 | type: "Eltwise" 2625 | bottom: "AttentionB_1/mask/down/res2_2/branch1/conv3_1x1/bn" 2626 | bottom: "AttentionB_1/mask/down/res2_1" 2627 | top: "AttentionB_1/mask/down/res2_2" 2628 | eltwise_param { 2629 | operation: SUM 2630 | } 2631 | } 2632 | 2633 | layer{ 2634 | name: "AttentionB_1/mask/down/res2_2/relu" 2635 | type: "ReLU" 2636 | bottom: "AttentionB_1/mask/down/res2_2" 2637 | top: "AttentionB_1/mask/down/res2_2" 2638 | } 2639 | 2640 | layer{ 2641 | name: "AttentionB_1/mask/up/interp_2" 2642 | type: "Interp" 2643 | bottom: "AttentionB_1/mask/down/res2_2" 2644 | bottom: "AttentionB_1/mask/down/res1_1" 2645 | top: "AttentionB_1/mask/up/interp_2" 2646 | } 2647 | 2648 | layer{ 2649 | name: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1" 2650 | type: "Convolution" 2651 | bottom: "AttentionB_1/mask/down/res1_1" 2652 | top: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1" 2653 | param{ 2654 | lr_mult: 1 2655 | decay_mult: 1 2656 | } 2657 | convolution_param { 2658 | num_output: 256 2659 | pad: 0 2660 | kernel_size: 1 2661 | stride: 1 2662 | bias_term: false 2663 | } 2664 | } 2665 | 2666 | layer{ 2667 | name: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1/bn" 2668 | type: "BN" 2669 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1" 2670 | top: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1/bn" 2671 | bn_param { 2672 | frozen: true 2673 | } 2674 | } 2675 | 2676 | layer{ 2677 | name: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1/bn/relu" 2678 | type: "ReLU" 2679 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1/bn" 2680 | top: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1/bn" 2681 | } 2682 | 2683 | layer{ 2684 | name: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3" 2685 | type: "Convolution" 2686 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv1_1x1/bn" 2687 | top: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3" 2688 | param{ 2689 | lr_mult: 1 2690 | decay_mult: 1 2691 | } 2692 | convolution_param { 2693 | num_output: 256 2694 | pad: 1 2695 | kernel_size: 3 2696 | stride: 1 2697 | group: 32 2698 | bias_term: false 2699 | } 2700 | } 2701 | 2702 | layer{ 2703 | name: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3/bn" 2704 | type: "BN" 2705 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3" 2706 | top: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3/bn" 2707 | bn_param { 2708 | frozen: true 2709 | } 2710 | } 2711 | 2712 | layer{ 2713 | name: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3/bn/relu" 2714 | type: "ReLU" 2715 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3/bn" 2716 | top: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3/bn" 2717 | } 2718 | 2719 | layer{ 2720 | name: "AttentionB_1/mask/skip/res1/branch1/conv3_1x1" 2721 | type: "Convolution" 2722 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv2_3x3/bn" 2723 | top: "AttentionB_1/mask/skip/res1/branch1/conv3_1x1" 2724 | param{ 2725 | lr_mult: 1 2726 | decay_mult: 1 2727 | } 2728 | convolution_param { 2729 | num_output: 512 2730 | pad: 0 2731 | kernel_size: 1 2732 | stride: 1 2733 | bias_term: false 2734 | } 2735 | } 2736 | 2737 | layer{ 2738 | name: "AttentionB_1/mask/skip/res1/branch1/conv3_1x1/bn" 2739 | type: "BN" 2740 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv3_1x1" 2741 | top: "AttentionB_1/mask/skip/res1/branch1/conv3_1x1/bn" 2742 | bn_param { 2743 | 
frozen: true 2744 | } 2745 | } 2746 | 2747 | layer{ 2748 | name: "AttentionB_1/mask/skip/res1" 2749 | type: "Eltwise" 2750 | bottom: "AttentionB_1/mask/skip/res1/branch1/conv3_1x1/bn" 2751 | bottom: "AttentionB_1/mask/down/res1_1" 2752 | top: "AttentionB_1/mask/skip/res1" 2753 | eltwise_param { 2754 | operation: SUM 2755 | } 2756 | } 2757 | 2758 | layer{ 2759 | name: "AttentionB_1/mask/skip/res1/relu" 2760 | type: "ReLU" 2761 | bottom: "AttentionB_1/mask/skip/res1" 2762 | top: "AttentionB_1/mask/skip/res1" 2763 | } 2764 | 2765 | layer{ 2766 | name: "AttentionB_1/mask/up1" 2767 | type: "Eltwise" 2768 | bottom: "AttentionB_1/mask/skip/res1" 2769 | bottom: "AttentionB_1/mask/up/interp_2" 2770 | top: "AttentionB_1/mask/up1" 2771 | eltwise_param { 2772 | operation: SUM 2773 | } 2774 | } 2775 | 2776 | layer{ 2777 | name: "AttentionB_1/mask/up/res1_1/branch1/conv1_1x1" 2778 | type: "Convolution" 2779 | bottom: "AttentionB_1/mask/up1" 2780 | top: "AttentionB_1/mask/up/res1_1/branch1/conv1_1x1" 2781 | param{ 2782 | lr_mult: 1 2783 | decay_mult: 1 2784 | } 2785 | convolution_param { 2786 | num_output: 256 2787 | pad: 0 2788 | kernel_size: 1 2789 | stride: 1 2790 | bias_term: false 2791 | } 2792 | } 2793 | 2794 | layer{ 2795 | name: "AttentionB_1/mask/up/res1_1/branch1/conv1_1x1/bn" 2796 | type: "BN" 2797 | bottom: "AttentionB_1/mask/up/res1_1/branch1/conv1_1x1" 2798 | top: "AttentionB_1/mask/up/res1_1/branch1/conv1_1x1/bn" 2799 | bn_param { 2800 | frozen: true 2801 | } 2802 | } 2803 | 2804 | layer{ 2805 | name: "AttentionB_1/mask/up/res1_1/branch1/conv1_1x1/bn/relu" 2806 | type: "ReLU" 2807 | bottom: "AttentionB_1/mask/up/res1_1/branch1/conv1_1x1/bn" 2808 | top: "AttentionB_1/mask/up/res1_1/branch1/conv1_1x1/bn" 2809 | } 2810 | 2811 | layer{ 2812 | name: "AttentionB_1/mask/up/res1_1/branch1/conv2_3x3" 2813 | type: "Convolution" 2814 | bottom: "AttentionB_1/mask/up/res1_1/branch1/conv1_1x1/bn" 2815 | top: "AttentionB_1/mask/up/res1_1/branch1/conv2_3x3" 2816 | param{ 2817 | lr_mult: 1 2818 | decay_mult: 1 2819 | } 2820 | convolution_param { 2821 | num_output: 256 2822 | pad: 1 2823 | kernel_size: 3 2824 | stride: 1 2825 | group: 32 2826 | bias_term: false 2827 | } 2828 | } 2829 | 2830 | layer{ 2831 | name: "AttentionB_1/mask/up/res1_1/branch1/conv2_3x3/bn" 2832 | type: "BN" 2833 | bottom: "AttentionB_1/mask/up/res1_1/branch1/conv2_3x3" 2834 | top: "AttentionB_1/mask/up/res1_1/branch1/conv2_3x3/bn" 2835 | bn_param { 2836 | frozen: true 2837 | } 2838 | } 2839 | 2840 | layer{ 2841 | name: "AttentionB_1/mask/up/res1_1/branch1/conv2_3x3/bn/relu" 2842 | type: "ReLU" 2843 | bottom: "AttentionB_1/mask/up/res1_1/branch1/conv2_3x3/bn" 2844 | top: "AttentionB_1/mask/up/res1_1/branch1/conv2_3x3/bn" 2845 | } 2846 | 2847 | layer{ 2848 | name: "AttentionB_1/mask/up/res1_1/branch1/conv3_1x1" 2849 | type: "Convolution" 2850 | bottom: "AttentionB_1/mask/up/res1_1/branch1/conv2_3x3/bn" 2851 | top: "AttentionB_1/mask/up/res1_1/branch1/conv3_1x1" 2852 | param{ 2853 | lr_mult: 1 2854 | decay_mult: 1 2855 | } 2856 | convolution_param { 2857 | num_output: 512 2858 | pad: 0 2859 | kernel_size: 1 2860 | stride: 1 2861 | bias_term: false 2862 | } 2863 | } 2864 | 2865 | layer{ 2866 | name: "AttentionB_1/mask/up/res1_1/branch1/conv3_1x1/bn" 2867 | type: "BN" 2868 | bottom: "AttentionB_1/mask/up/res1_1/branch1/conv3_1x1" 2869 | top: "AttentionB_1/mask/up/res1_1/branch1/conv3_1x1/bn" 2870 | bn_param { 2871 | frozen: true 2872 | } 2873 | } 2874 | 2875 | layer{ 2876 | name: "AttentionB_1/mask/up/res1_1" 2877 | type: "Eltwise" 2878 | 
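# Bottom-up/top-down mask branch, as in the paper's hourglass design:
# interp_2 up-samples the deepest res2 features back to the resolution of
# down/res1_1 (the second bottom of an Interp layer appears to supply the
# target size), skip/res1 refines the higher-resolution features on the
# skip connection, and up1 merges the two paths by element-wise SUM.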
bottom: "AttentionB_1/mask/up/res1_1/branch1/conv3_1x1/bn" 2879 | bottom: "AttentionB_1/mask/up1" 2880 | top: "AttentionB_1/mask/up/res1_1" 2881 | eltwise_param { 2882 | operation: SUM 2883 | } 2884 | } 2885 | 2886 | layer{ 2887 | name: "AttentionB_1/mask/up/res1_1/relu" 2888 | type: "ReLU" 2889 | bottom: "AttentionB_1/mask/up/res1_1" 2890 | top: "AttentionB_1/mask/up/res1_1" 2891 | } 2892 | 2893 | layer{ 2894 | name: "AttentionB_1/mask/up/interp_1" 2895 | type: "Interp" 2896 | bottom: "AttentionB_1/mask/up/res1_1" 2897 | bottom: "AttentionB_1/trunk/res3" 2898 | top: "AttentionB_1/mask/up/interp_1" 2899 | } 2900 | 2901 | layer{ 2902 | name: "AttentionB_1/mask/linear_1" 2903 | type: "Convolution" 2904 | bottom: "AttentionB_1/mask/up/interp_1" 2905 | top: "AttentionB_1/mask/linear_1" 2906 | param{ 2907 | lr_mult: 1 2908 | decay_mult: 1 2909 | } 2910 | convolution_param { 2911 | num_output: 512 2912 | pad: 0 2913 | kernel_size: 1 2914 | stride: 1 2915 | bias_term: false 2916 | } 2917 | } 2918 | 2919 | layer{ 2920 | name: "AttentionB_1/mask/linear_1/bn" 2921 | type: "BN" 2922 | bottom: "AttentionB_1/mask/linear_1" 2923 | top: "AttentionB_1/mask/linear_1/bn" 2924 | bn_param { 2925 | frozen: true 2926 | } 2927 | } 2928 | 2929 | layer{ 2930 | name: "AttentionB_1/mask/linear_1/bn/relu" 2931 | type: "ReLU" 2932 | bottom: "AttentionB_1/mask/linear_1/bn" 2933 | top: "AttentionB_1/mask/linear_1/bn" 2934 | } 2935 | 2936 | layer{ 2937 | name: "AttentionB_1/mask/linear_2" 2938 | type: "Convolution" 2939 | bottom: "AttentionB_1/mask/linear_1/bn" 2940 | top: "AttentionB_1/mask/linear_2" 2941 | param{ 2942 | lr_mult: 1 2943 | decay_mult: 1 2944 | } 2945 | convolution_param { 2946 | num_output: 512 2947 | pad: 0 2948 | kernel_size: 1 2949 | stride: 1 2950 | bias_term: false 2951 | } 2952 | } 2953 | 2954 | layer{ 2955 | name: "AttentionB_1/mask" 2956 | type: "Sigmoid" 2957 | bottom: "AttentionB_1/mask/linear_2" 2958 | top: "AttentionB_1/mask" 2959 | } 2960 | 2961 | layer{ 2962 | name: "AttentionB_1/attention_residual" 2963 | type: "Eltwise" 2964 | bottom: "AttentionB_1/trunk/res3" 2965 | bottom: "AttentionB_1/mask" 2966 | top: "AttentionB_1/attention_residual" 2967 | eltwise_param { 2968 | operation: PROD 2969 | } 2970 | } 2971 | 2972 | layer{ 2973 | name: "AttentionB_1/fusion" 2974 | type: "Eltwise" 2975 | bottom: "AttentionB_1/attention_residual" 2976 | bottom: "AttentionB_1/trunk/res3" 2977 | top: "AttentionB_1/fusion" 2978 | eltwise_param { 2979 | operation: SUM 2980 | } 2981 | } 2982 | 2983 | layer{ 2984 | name: "AttentionB_1/branch1/conv1_1x1" 2985 | type: "Convolution" 2986 | bottom: "AttentionB_1/fusion" 2987 | top: "AttentionB_1/branch1/conv1_1x1" 2988 | param{ 2989 | lr_mult: 1 2990 | decay_mult: 1 2991 | } 2992 | convolution_param { 2993 | num_output: 256 2994 | pad: 0 2995 | kernel_size: 1 2996 | stride: 1 2997 | bias_term: false 2998 | } 2999 | } 3000 | 3001 | layer{ 3002 | name: "AttentionB_1/branch1/conv1_1x1/bn" 3003 | type: "BN" 3004 | bottom: "AttentionB_1/branch1/conv1_1x1" 3005 | top: "AttentionB_1/branch1/conv1_1x1/bn" 3006 | bn_param { 3007 | frozen: true 3008 | } 3009 | } 3010 | 3011 | layer{ 3012 | name: "AttentionB_1/branch1/conv1_1x1/bn/relu" 3013 | type: "ReLU" 3014 | bottom: "AttentionB_1/branch1/conv1_1x1/bn" 3015 | top: "AttentionB_1/branch1/conv1_1x1/bn" 3016 | } 3017 | 3018 | layer{ 3019 | name: "AttentionB_1/branch1/conv2_3x3" 3020 | type: "Convolution" 3021 | bottom: "AttentionB_1/branch1/conv1_1x1/bn" 3022 | top: "AttentionB_1/branch1/conv2_3x3" 3023 | param{ 3024 | lr_mult: 
1 3025 | decay_mult: 1 3026 | } 3027 | convolution_param { 3028 | num_output: 256 3029 | pad: 1 3030 | kernel_size: 3 3031 | stride: 1 3032 | group: 32 3033 | bias_term: false 3034 | } 3035 | } 3036 | 3037 | layer{ 3038 | name: "AttentionB_1/branch1/conv2_3x3/bn" 3039 | type: "BN" 3040 | bottom: "AttentionB_1/branch1/conv2_3x3" 3041 | top: "AttentionB_1/branch1/conv2_3x3/bn" 3042 | bn_param { 3043 | frozen: true 3044 | } 3045 | } 3046 | 3047 | layer{ 3048 | name: "AttentionB_1/branch1/conv2_3x3/bn/relu" 3049 | type: "ReLU" 3050 | bottom: "AttentionB_1/branch1/conv2_3x3/bn" 3051 | top: "AttentionB_1/branch1/conv2_3x3/bn" 3052 | } 3053 | 3054 | layer{ 3055 | name: "AttentionB_1/branch1/conv3_1x1" 3056 | type: "Convolution" 3057 | bottom: "AttentionB_1/branch1/conv2_3x3/bn" 3058 | top: "AttentionB_1/branch1/conv3_1x1" 3059 | param{ 3060 | lr_mult: 1 3061 | decay_mult: 1 3062 | } 3063 | convolution_param { 3064 | num_output: 512 3065 | pad: 0 3066 | kernel_size: 1 3067 | stride: 1 3068 | bias_term: false 3069 | } 3070 | } 3071 | 3072 | layer{ 3073 | name: "AttentionB_1/branch1/conv3_1x1/bn" 3074 | type: "BN" 3075 | bottom: "AttentionB_1/branch1/conv3_1x1" 3076 | top: "AttentionB_1/branch1/conv3_1x1/bn" 3077 | bn_param { 3078 | frozen: true 3079 | } 3080 | } 3081 | 3082 | layer{ 3083 | name: "AttentionB_1" 3084 | type: "Eltwise" 3085 | bottom: "AttentionB_1/branch1/conv3_1x1/bn" 3086 | bottom: "AttentionB_1/fusion" 3087 | top: "AttentionB_1" 3088 | eltwise_param { 3089 | operation: SUM 3090 | } 3091 | } 3092 | 3093 | layer{ 3094 | name: "AttentionB_1/relu" 3095 | type: "ReLU" 3096 | bottom: "AttentionB_1" 3097 | top: "AttentionB_1" 3098 | } 3099 | 3100 | layer{ 3101 | name: "pre_res_3/branch1/conv1_1x1" 3102 | type: "Convolution" 3103 | bottom: "AttentionB_1" 3104 | top: "pre_res_3/branch1/conv1_1x1" 3105 | param{ 3106 | lr_mult: 1 3107 | decay_mult: 1 3108 | } 3109 | convolution_param { 3110 | num_output: 512 3111 | pad: 0 3112 | kernel_size: 1 3113 | stride: 1 3114 | bias_term: false 3115 | } 3116 | } 3117 | 3118 | layer{ 3119 | name: "pre_res_3/branch1/conv1_1x1/bn" 3120 | type: "BN" 3121 | bottom: "pre_res_3/branch1/conv1_1x1" 3122 | top: "pre_res_3/branch1/conv1_1x1/bn" 3123 | bn_param { 3124 | frozen: true 3125 | } 3126 | } 3127 | 3128 | layer{ 3129 | name: "pre_res_3/branch1/conv1_1x1/bn/relu" 3130 | type: "ReLU" 3131 | bottom: "pre_res_3/branch1/conv1_1x1/bn" 3132 | top: "pre_res_3/branch1/conv1_1x1/bn" 3133 | } 3134 | 3135 | layer{ 3136 | name: "pre_res_3/branch1/conv2_3x3" 3137 | type: "Convolution" 3138 | bottom: "pre_res_3/branch1/conv1_1x1/bn" 3139 | top: "pre_res_3/branch1/conv2_3x3" 3140 | param{ 3141 | lr_mult: 1 3142 | decay_mult: 1 3143 | } 3144 | convolution_param { 3145 | num_output: 512 3146 | pad: 1 3147 | kernel_size: 3 3148 | stride: 2 3149 | group: 32 3150 | bias_term: false 3151 | } 3152 | } 3153 | 3154 | layer{ 3155 | name: "pre_res_3/branch1/conv2_3x3/bn" 3156 | type: "BN" 3157 | bottom: "pre_res_3/branch1/conv2_3x3" 3158 | top: "pre_res_3/branch1/conv2_3x3/bn" 3159 | bn_param { 3160 | frozen: true 3161 | } 3162 | } 3163 | 3164 | layer{ 3165 | name: "pre_res_3/branch1/conv2_3x3/bn/relu" 3166 | type: "ReLU" 3167 | bottom: "pre_res_3/branch1/conv2_3x3/bn" 3168 | top: "pre_res_3/branch1/conv2_3x3/bn" 3169 | } 3170 | 3171 | layer{ 3172 | name: "pre_res_3/branch1/conv3_1x1" 3173 | type: "Convolution" 3174 | bottom: "pre_res_3/branch1/conv2_3x3/bn" 3175 | top: "pre_res_3/branch1/conv3_1x1" 3176 | param{ 3177 | lr_mult: 1 3178 | decay_mult: 1 3179 | } 3180 | 
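# pre_res_3 is the stride-2 transition between the AttentionB and
# AttentionC stages: branch1 applies its 3x3 group-32 convolution with
# stride 2 while branch2 is a stride-2 1x1 projection shortcut, halving
# the spatial size and doubling the width to 1024 channels.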
convolution_param { 3181 | num_output: 1024 3182 | pad: 0 3183 | kernel_size: 1 3184 | stride: 1 3185 | bias_term: false 3186 | } 3187 | } 3188 | 3189 | layer{ 3190 | name: "pre_res_3/branch1/conv3_1x1/bn" 3191 | type: "BN" 3192 | bottom: "pre_res_3/branch1/conv3_1x1" 3193 | top: "pre_res_3/branch1/conv3_1x1/bn" 3194 | bn_param { 3195 | frozen: true 3196 | } 3197 | } 3198 | 3199 | layer{ 3200 | name: "pre_res_3/branch2/conv1_1x1" 3201 | type: "Convolution" 3202 | bottom: "AttentionB_1" 3203 | top: "pre_res_3/branch2/conv1_1x1" 3204 | param{ 3205 | lr_mult: 1 3206 | decay_mult: 1 3207 | } 3208 | convolution_param { 3209 | num_output: 1024 3210 | pad: 0 3211 | kernel_size: 1 3212 | stride: 2 3213 | bias_term: false 3214 | } 3215 | } 3216 | 3217 | layer{ 3218 | name: "pre_res_3/branch2/conv1_1x1/bn" 3219 | type: "BN" 3220 | bottom: "pre_res_3/branch2/conv1_1x1" 3221 | top: "pre_res_3/branch2/conv1_1x1/bn" 3222 | bn_param { 3223 | frozen: true 3224 | } 3225 | } 3226 | 3227 | layer{ 3228 | name: "pre_res_3" 3229 | type: "Eltwise" 3230 | bottom: "pre_res_3/branch2/conv1_1x1/bn" 3231 | bottom: "pre_res_3/branch1/conv3_1x1/bn" 3232 | top: "pre_res_3" 3233 | eltwise_param { 3234 | operation: SUM 3235 | } 3236 | } 3237 | 3238 | layer{ 3239 | name: "pre_res_3/relu" 3240 | type: "ReLU" 3241 | bottom: "pre_res_3" 3242 | top: "pre_res_3" 3243 | } 3244 | 3245 | layer{ 3246 | name: "AttentionC_1/trunk/res1/branch1/conv1_1x1" 3247 | type: "Convolution" 3248 | bottom: "pre_res_3" 3249 | top: "AttentionC_1/trunk/res1/branch1/conv1_1x1" 3250 | param{ 3251 | lr_mult: 1 3252 | decay_mult: 1 3253 | } 3254 | convolution_param { 3255 | num_output: 512 3256 | pad: 0 3257 | kernel_size: 1 3258 | stride: 1 3259 | bias_term: false 3260 | } 3261 | } 3262 | 3263 | layer{ 3264 | name: "AttentionC_1/trunk/res1/branch1/conv1_1x1/bn" 3265 | type: "BN" 3266 | bottom: "AttentionC_1/trunk/res1/branch1/conv1_1x1" 3267 | top: "AttentionC_1/trunk/res1/branch1/conv1_1x1/bn" 3268 | bn_param { 3269 | frozen: true 3270 | } 3271 | } 3272 | 3273 | layer{ 3274 | name: "AttentionC_1/trunk/res1/branch1/conv1_1x1/bn/relu" 3275 | type: "ReLU" 3276 | bottom: "AttentionC_1/trunk/res1/branch1/conv1_1x1/bn" 3277 | top: "AttentionC_1/trunk/res1/branch1/conv1_1x1/bn" 3278 | } 3279 | 3280 | layer{ 3281 | name: "AttentionC_1/trunk/res1/branch1/conv2_3x3" 3282 | type: "Convolution" 3283 | bottom: "AttentionC_1/trunk/res1/branch1/conv1_1x1/bn" 3284 | top: "AttentionC_1/trunk/res1/branch1/conv2_3x3" 3285 | param{ 3286 | lr_mult: 1 3287 | decay_mult: 1 3288 | } 3289 | convolution_param { 3290 | num_output: 512 3291 | pad: 1 3292 | kernel_size: 3 3293 | stride: 1 3294 | group: 32 3295 | bias_term: false 3296 | } 3297 | } 3298 | 3299 | layer{ 3300 | name: "AttentionC_1/trunk/res1/branch1/conv2_3x3/bn" 3301 | type: "BN" 3302 | bottom: "AttentionC_1/trunk/res1/branch1/conv2_3x3" 3303 | top: "AttentionC_1/trunk/res1/branch1/conv2_3x3/bn" 3304 | bn_param { 3305 | frozen: true 3306 | } 3307 | } 3308 | 3309 | layer{ 3310 | name: "AttentionC_1/trunk/res1/branch1/conv2_3x3/bn/relu" 3311 | type: "ReLU" 3312 | bottom: "AttentionC_1/trunk/res1/branch1/conv2_3x3/bn" 3313 | top: "AttentionC_1/trunk/res1/branch1/conv2_3x3/bn" 3314 | } 3315 | 3316 | layer{ 3317 | name: "AttentionC_1/trunk/res1/branch1/conv3_1x1" 3318 | type: "Convolution" 3319 | bottom: "AttentionC_1/trunk/res1/branch1/conv2_3x3/bn" 3320 | top: "AttentionC_1/trunk/res1/branch1/conv3_1x1" 3321 | param{ 3322 | lr_mult: 1 3323 | decay_mult: 1 3324 | } 3325 | convolution_param { 3326 | num_output: 1024 
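# The AttentionC_1 trunk branch stacks three identical ResNeXt
# bottlenecks (1x1 down to 512 -> 3x3 group-32 -> 1x1 up to 1024) with
# identity shortcuts, mirroring the trunk of the earlier attention
# modules.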
3327 | pad: 0 3328 | kernel_size: 1 3329 | stride: 1 3330 | bias_term: false 3331 | } 3332 | } 3333 | 3334 | layer{ 3335 | name: "AttentionC_1/trunk/res1/branch1/conv3_1x1/bn" 3336 | type: "BN" 3337 | bottom: "AttentionC_1/trunk/res1/branch1/conv3_1x1" 3338 | top: "AttentionC_1/trunk/res1/branch1/conv3_1x1/bn" 3339 | bn_param { 3340 | frozen: true 3341 | } 3342 | } 3343 | 3344 | layer{ 3345 | name: "AttentionC_1/trunk/res1" 3346 | type: "Eltwise" 3347 | bottom: "AttentionC_1/trunk/res1/branch1/conv3_1x1/bn" 3348 | bottom: "pre_res_3" 3349 | top: "AttentionC_1/trunk/res1" 3350 | eltwise_param { 3351 | operation: SUM 3352 | } 3353 | } 3354 | 3355 | layer{ 3356 | name: "AttentionC_1/trunk/res1/relu" 3357 | type: "ReLU" 3358 | bottom: "AttentionC_1/trunk/res1" 3359 | top: "AttentionC_1/trunk/res1" 3360 | } 3361 | 3362 | layer{ 3363 | name: "AttentionC_1/trunk/res2/branch1/conv1_1x1" 3364 | type: "Convolution" 3365 | bottom: "AttentionC_1/trunk/res1" 3366 | top: "AttentionC_1/trunk/res2/branch1/conv1_1x1" 3367 | param{ 3368 | lr_mult: 1 3369 | decay_mult: 1 3370 | } 3371 | convolution_param { 3372 | num_output: 512 3373 | pad: 0 3374 | kernel_size: 1 3375 | stride: 1 3376 | bias_term: false 3377 | } 3378 | } 3379 | 3380 | layer{ 3381 | name: "AttentionC_1/trunk/res2/branch1/conv1_1x1/bn" 3382 | type: "BN" 3383 | bottom: "AttentionC_1/trunk/res2/branch1/conv1_1x1" 3384 | top: "AttentionC_1/trunk/res2/branch1/conv1_1x1/bn" 3385 | bn_param { 3386 | frozen: true 3387 | } 3388 | } 3389 | 3390 | layer{ 3391 | name: "AttentionC_1/trunk/res2/branch1/conv1_1x1/bn/relu" 3392 | type: "ReLU" 3393 | bottom: "AttentionC_1/trunk/res2/branch1/conv1_1x1/bn" 3394 | top: "AttentionC_1/trunk/res2/branch1/conv1_1x1/bn" 3395 | } 3396 | 3397 | layer{ 3398 | name: "AttentionC_1/trunk/res2/branch1/conv2_3x3" 3399 | type: "Convolution" 3400 | bottom: "AttentionC_1/trunk/res2/branch1/conv1_1x1/bn" 3401 | top: "AttentionC_1/trunk/res2/branch1/conv2_3x3" 3402 | param{ 3403 | lr_mult: 1 3404 | decay_mult: 1 3405 | } 3406 | convolution_param { 3407 | num_output: 512 3408 | pad: 1 3409 | kernel_size: 3 3410 | stride: 1 3411 | group: 32 3412 | bias_term: false 3413 | } 3414 | } 3415 | 3416 | layer{ 3417 | name: "AttentionC_1/trunk/res2/branch1/conv2_3x3/bn" 3418 | type: "BN" 3419 | bottom: "AttentionC_1/trunk/res2/branch1/conv2_3x3" 3420 | top: "AttentionC_1/trunk/res2/branch1/conv2_3x3/bn" 3421 | bn_param { 3422 | frozen: true 3423 | } 3424 | } 3425 | 3426 | layer{ 3427 | name: "AttentionC_1/trunk/res2/branch1/conv2_3x3/bn/relu" 3428 | type: "ReLU" 3429 | bottom: "AttentionC_1/trunk/res2/branch1/conv2_3x3/bn" 3430 | top: "AttentionC_1/trunk/res2/branch1/conv2_3x3/bn" 3431 | } 3432 | 3433 | layer{ 3434 | name: "AttentionC_1/trunk/res2/branch1/conv3_1x1" 3435 | type: "Convolution" 3436 | bottom: "AttentionC_1/trunk/res2/branch1/conv2_3x3/bn" 3437 | top: "AttentionC_1/trunk/res2/branch1/conv3_1x1" 3438 | param{ 3439 | lr_mult: 1 3440 | decay_mult: 1 3441 | } 3442 | convolution_param { 3443 | num_output: 1024 3444 | pad: 0 3445 | kernel_size: 1 3446 | stride: 1 3447 | bias_term: false 3448 | } 3449 | } 3450 | 3451 | layer{ 3452 | name: "AttentionC_1/trunk/res2/branch1/conv3_1x1/bn" 3453 | type: "BN" 3454 | bottom: "AttentionC_1/trunk/res2/branch1/conv3_1x1" 3455 | top: "AttentionC_1/trunk/res2/branch1/conv3_1x1/bn" 3456 | bn_param { 3457 | frozen: true 3458 | } 3459 | } 3460 | 3461 | layer{ 3462 | name: "AttentionC_1/trunk/res2" 3463 | type: "Eltwise" 3464 | bottom: "AttentionC_1/trunk/res2/branch1/conv3_1x1/bn" 3465 | bottom: 
"AttentionC_1/trunk/res1" 3466 | top: "AttentionC_1/trunk/res2" 3467 | eltwise_param { 3468 | operation: SUM 3469 | } 3470 | } 3471 | 3472 | layer{ 3473 | name: "AttentionC_1/trunk/res2/relu" 3474 | type: "ReLU" 3475 | bottom: "AttentionC_1/trunk/res2" 3476 | top: "AttentionC_1/trunk/res2" 3477 | } 3478 | 3479 | layer{ 3480 | name: "AttentionC_1/trunk/res3/branch1/conv1_1x1" 3481 | type: "Convolution" 3482 | bottom: "AttentionC_1/trunk/res2" 3483 | top: "AttentionC_1/trunk/res3/branch1/conv1_1x1" 3484 | param{ 3485 | lr_mult: 1 3486 | decay_mult: 1 3487 | } 3488 | convolution_param { 3489 | num_output: 512 3490 | pad: 0 3491 | kernel_size: 1 3492 | stride: 1 3493 | bias_term: false 3494 | } 3495 | } 3496 | 3497 | layer{ 3498 | name: "AttentionC_1/trunk/res3/branch1/conv1_1x1/bn" 3499 | type: "BN" 3500 | bottom: "AttentionC_1/trunk/res3/branch1/conv1_1x1" 3501 | top: "AttentionC_1/trunk/res3/branch1/conv1_1x1/bn" 3502 | bn_param { 3503 | frozen: true 3504 | } 3505 | } 3506 | 3507 | layer{ 3508 | name: "AttentionC_1/trunk/res3/branch1/conv1_1x1/bn/relu" 3509 | type: "ReLU" 3510 | bottom: "AttentionC_1/trunk/res3/branch1/conv1_1x1/bn" 3511 | top: "AttentionC_1/trunk/res3/branch1/conv1_1x1/bn" 3512 | } 3513 | 3514 | layer{ 3515 | name: "AttentionC_1/trunk/res3/branch1/conv2_3x3" 3516 | type: "Convolution" 3517 | bottom: "AttentionC_1/trunk/res3/branch1/conv1_1x1/bn" 3518 | top: "AttentionC_1/trunk/res3/branch1/conv2_3x3" 3519 | param{ 3520 | lr_mult: 1 3521 | decay_mult: 1 3522 | } 3523 | convolution_param { 3524 | num_output: 512 3525 | pad: 1 3526 | kernel_size: 3 3527 | stride: 1 3528 | group: 32 3529 | bias_term: false 3530 | } 3531 | } 3532 | 3533 | layer{ 3534 | name: "AttentionC_1/trunk/res3/branch1/conv2_3x3/bn" 3535 | type: "BN" 3536 | bottom: "AttentionC_1/trunk/res3/branch1/conv2_3x3" 3537 | top: "AttentionC_1/trunk/res3/branch1/conv2_3x3/bn" 3538 | bn_param { 3539 | frozen: true 3540 | } 3541 | } 3542 | 3543 | layer{ 3544 | name: "AttentionC_1/trunk/res3/branch1/conv2_3x3/bn/relu" 3545 | type: "ReLU" 3546 | bottom: "AttentionC_1/trunk/res3/branch1/conv2_3x3/bn" 3547 | top: "AttentionC_1/trunk/res3/branch1/conv2_3x3/bn" 3548 | } 3549 | 3550 | layer{ 3551 | name: "AttentionC_1/trunk/res3/branch1/conv3_1x1" 3552 | type: "Convolution" 3553 | bottom: "AttentionC_1/trunk/res3/branch1/conv2_3x3/bn" 3554 | top: "AttentionC_1/trunk/res3/branch1/conv3_1x1" 3555 | param{ 3556 | lr_mult: 1 3557 | decay_mult: 1 3558 | } 3559 | convolution_param { 3560 | num_output: 1024 3561 | pad: 0 3562 | kernel_size: 1 3563 | stride: 1 3564 | bias_term: false 3565 | } 3566 | } 3567 | 3568 | layer{ 3569 | name: "AttentionC_1/trunk/res3/branch1/conv3_1x1/bn" 3570 | type: "BN" 3571 | bottom: "AttentionC_1/trunk/res3/branch1/conv3_1x1" 3572 | top: "AttentionC_1/trunk/res3/branch1/conv3_1x1/bn" 3573 | bn_param { 3574 | frozen: true 3575 | } 3576 | } 3577 | 3578 | layer{ 3579 | name: "AttentionC_1/trunk/res3" 3580 | type: "Eltwise" 3581 | bottom: "AttentionC_1/trunk/res3/branch1/conv3_1x1/bn" 3582 | bottom: "AttentionC_1/trunk/res2" 3583 | top: "AttentionC_1/trunk/res3" 3584 | eltwise_param { 3585 | operation: SUM 3586 | } 3587 | } 3588 | 3589 | layer{ 3590 | name: "AttentionC_1/trunk/res3/relu" 3591 | type: "ReLU" 3592 | bottom: "AttentionC_1/trunk/res3" 3593 | top: "AttentionC_1/trunk/res3" 3594 | } 3595 | 3596 | layer{ 3597 | name: "AttentionC_1/mask/down/pool1_3x3_s2" 3598 | type: "Pooling" 3599 | bottom: "AttentionC_1/trunk/res1" 3600 | top: "AttentionC_1/mask/down/pool1_3x3_s2" 3601 | pooling_param { 3602 | 
pool: MAX 3603 | kernel_size: 3 3604 | stride: 2 3605 | } 3606 | } 3607 | 3608 | layer{ 3609 | name: "AttentionC_1/mask/down/res1_1/branch1/conv1_1x1" 3610 | type: "Convolution" 3611 | bottom: "AttentionC_1/mask/down/pool1_3x3_s2" 3612 | top: "AttentionC_1/mask/down/res1_1/branch1/conv1_1x1" 3613 | param{ 3614 | lr_mult: 1 3615 | decay_mult: 1 3616 | } 3617 | convolution_param { 3618 | num_output: 512 3619 | pad: 0 3620 | kernel_size: 1 3621 | stride: 1 3622 | bias_term: false 3623 | } 3624 | } 3625 | 3626 | layer{ 3627 | name: "AttentionC_1/mask/down/res1_1/branch1/conv1_1x1/bn" 3628 | type: "BN" 3629 | bottom: "AttentionC_1/mask/down/res1_1/branch1/conv1_1x1" 3630 | top: "AttentionC_1/mask/down/res1_1/branch1/conv1_1x1/bn" 3631 | bn_param { 3632 | frozen: true 3633 | } 3634 | } 3635 | 3636 | layer{ 3637 | name: "AttentionC_1/mask/down/res1_1/branch1/conv1_1x1/bn/relu" 3638 | type: "ReLU" 3639 | bottom: "AttentionC_1/mask/down/res1_1/branch1/conv1_1x1/bn" 3640 | top: "AttentionC_1/mask/down/res1_1/branch1/conv1_1x1/bn" 3641 | } 3642 | 3643 | layer{ 3644 | name: "AttentionC_1/mask/down/res1_1/branch1/conv2_3x3" 3645 | type: "Convolution" 3646 | bottom: "AttentionC_1/mask/down/res1_1/branch1/conv1_1x1/bn" 3647 | top: "AttentionC_1/mask/down/res1_1/branch1/conv2_3x3" 3648 | param{ 3649 | lr_mult: 1 3650 | decay_mult: 1 3651 | } 3652 | convolution_param { 3653 | num_output: 512 3654 | pad: 1 3655 | kernel_size: 3 3656 | stride: 1 3657 | group: 32 3658 | bias_term: false 3659 | } 3660 | } 3661 | 3662 | layer{ 3663 | name: "AttentionC_1/mask/down/res1_1/branch1/conv2_3x3/bn" 3664 | type: "BN" 3665 | bottom: "AttentionC_1/mask/down/res1_1/branch1/conv2_3x3" 3666 | top: "AttentionC_1/mask/down/res1_1/branch1/conv2_3x3/bn" 3667 | bn_param { 3668 | frozen: true 3669 | } 3670 | } 3671 | 3672 | layer{ 3673 | name: "AttentionC_1/mask/down/res1_1/branch1/conv2_3x3/bn/relu" 3674 | type: "ReLU" 3675 | bottom: "AttentionC_1/mask/down/res1_1/branch1/conv2_3x3/bn" 3676 | top: "AttentionC_1/mask/down/res1_1/branch1/conv2_3x3/bn" 3677 | } 3678 | 3679 | layer{ 3680 | name: "AttentionC_1/mask/down/res1_1/branch1/conv3_1x1" 3681 | type: "Convolution" 3682 | bottom: "AttentionC_1/mask/down/res1_1/branch1/conv2_3x3/bn" 3683 | top: "AttentionC_1/mask/down/res1_1/branch1/conv3_1x1" 3684 | param{ 3685 | lr_mult: 1 3686 | decay_mult: 1 3687 | } 3688 | convolution_param { 3689 | num_output: 1024 3690 | pad: 0 3691 | kernel_size: 1 3692 | stride: 1 3693 | bias_term: false 3694 | } 3695 | } 3696 | 3697 | layer{ 3698 | name: "AttentionC_1/mask/down/res1_1/branch1/conv3_1x1/bn" 3699 | type: "BN" 3700 | bottom: "AttentionC_1/mask/down/res1_1/branch1/conv3_1x1" 3701 | top: "AttentionC_1/mask/down/res1_1/branch1/conv3_1x1/bn" 3702 | bn_param { 3703 | frozen: true 3704 | } 3705 | } 3706 | 3707 | layer{ 3708 | name: "AttentionC_1/mask/down/res1_1" 3709 | type: "Eltwise" 3710 | bottom: "AttentionC_1/mask/down/res1_1/branch1/conv3_1x1/bn" 3711 | bottom: "AttentionC_1/mask/down/pool1_3x3_s2" 3712 | top: "AttentionC_1/mask/down/res1_1" 3713 | eltwise_param { 3714 | operation: SUM 3715 | } 3716 | } 3717 | 3718 | layer{ 3719 | name: "AttentionC_1/mask/down/res1_1/relu" 3720 | type: "ReLU" 3721 | bottom: "AttentionC_1/mask/down/res1_1" 3722 | top: "AttentionC_1/mask/down/res1_1" 3723 | } 3724 | 3725 | layer{ 3726 | name: "AttentionC_1/mask/down/res1_2/branch1/conv1_1x1" 3727 | type: "Convolution" 3728 | bottom: "AttentionC_1/mask/down/res1_1" 3729 | top: "AttentionC_1/mask/down/res1_2/branch1/conv1_1x1" 3730 | param{ 3731 | lr_mult: 1 
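# With the feature map down to 14x14 at this stage (for the 224x224
# deploy input), the AttentionC mask branch uses a single down/up level
# (one max-pool, two residual units, one Interp) instead of the nested
# two-level hourglass used in AttentionB_1.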
3732 | decay_mult: 1 3733 | } 3734 | convolution_param { 3735 | num_output: 512 3736 | pad: 0 3737 | kernel_size: 1 3738 | stride: 1 3739 | bias_term: false 3740 | } 3741 | } 3742 | 3743 | layer{ 3744 | name: "AttentionC_1/mask/down/res1_2/branch1/conv1_1x1/bn" 3745 | type: "BN" 3746 | bottom: "AttentionC_1/mask/down/res1_2/branch1/conv1_1x1" 3747 | top: "AttentionC_1/mask/down/res1_2/branch1/conv1_1x1/bn" 3748 | bn_param { 3749 | frozen: true 3750 | } 3751 | } 3752 | 3753 | layer{ 3754 | name: "AttentionC_1/mask/down/res1_2/branch1/conv1_1x1/bn/relu" 3755 | type: "ReLU" 3756 | bottom: "AttentionC_1/mask/down/res1_2/branch1/conv1_1x1/bn" 3757 | top: "AttentionC_1/mask/down/res1_2/branch1/conv1_1x1/bn" 3758 | } 3759 | 3760 | layer{ 3761 | name: "AttentionC_1/mask/down/res1_2/branch1/conv2_3x3" 3762 | type: "Convolution" 3763 | bottom: "AttentionC_1/mask/down/res1_2/branch1/conv1_1x1/bn" 3764 | top: "AttentionC_1/mask/down/res1_2/branch1/conv2_3x3" 3765 | param{ 3766 | lr_mult: 1 3767 | decay_mult: 1 3768 | } 3769 | convolution_param { 3770 | num_output: 512 3771 | pad: 1 3772 | kernel_size: 3 3773 | stride: 1 3774 | group: 32 3775 | bias_term: false 3776 | } 3777 | } 3778 | 3779 | layer{ 3780 | name: "AttentionC_1/mask/down/res1_2/branch1/conv2_3x3/bn" 3781 | type: "BN" 3782 | bottom: "AttentionC_1/mask/down/res1_2/branch1/conv2_3x3" 3783 | top: "AttentionC_1/mask/down/res1_2/branch1/conv2_3x3/bn" 3784 | bn_param { 3785 | frozen: true 3786 | } 3787 | } 3788 | 3789 | layer{ 3790 | name: "AttentionC_1/mask/down/res1_2/branch1/conv2_3x3/bn/relu" 3791 | type: "ReLU" 3792 | bottom: "AttentionC_1/mask/down/res1_2/branch1/conv2_3x3/bn" 3793 | top: "AttentionC_1/mask/down/res1_2/branch1/conv2_3x3/bn" 3794 | } 3795 | 3796 | layer{ 3797 | name: "AttentionC_1/mask/down/res1_2/branch1/conv3_1x1" 3798 | type: "Convolution" 3799 | bottom: "AttentionC_1/mask/down/res1_2/branch1/conv2_3x3/bn" 3800 | top: "AttentionC_1/mask/down/res1_2/branch1/conv3_1x1" 3801 | param{ 3802 | lr_mult: 1 3803 | decay_mult: 1 3804 | } 3805 | convolution_param { 3806 | num_output: 1024 3807 | pad: 0 3808 | kernel_size: 1 3809 | stride: 1 3810 | bias_term: false 3811 | } 3812 | } 3813 | 3814 | layer{ 3815 | name: "AttentionC_1/mask/down/res1_2/branch1/conv3_1x1/bn" 3816 | type: "BN" 3817 | bottom: "AttentionC_1/mask/down/res1_2/branch1/conv3_1x1" 3818 | top: "AttentionC_1/mask/down/res1_2/branch1/conv3_1x1/bn" 3819 | bn_param { 3820 | frozen: true 3821 | } 3822 | } 3823 | 3824 | layer{ 3825 | name: "AttentionC_1/mask/down/res1_2" 3826 | type: "Eltwise" 3827 | bottom: "AttentionC_1/mask/down/res1_2/branch1/conv3_1x1/bn" 3828 | bottom: "AttentionC_1/mask/down/res1_1" 3829 | top: "AttentionC_1/mask/down/res1_2" 3830 | eltwise_param { 3831 | operation: SUM 3832 | } 3833 | } 3834 | 3835 | layer{ 3836 | name: "AttentionC_1/mask/down/res1_2/relu" 3837 | type: "ReLU" 3838 | bottom: "AttentionC_1/mask/down/res1_2" 3839 | top: "AttentionC_1/mask/down/res1_2" 3840 | } 3841 | 3842 | layer{ 3843 | name: "AttentionC_1/mask/up/interp_1" 3844 | type: "Interp" 3845 | bottom: "AttentionC_1/mask/down/res1_2" 3846 | bottom: "AttentionC_1/trunk/res3" 3847 | top: "AttentionC_1/mask/up/interp_1" 3848 | } 3849 | 3850 | layer{ 3851 | name: "AttentionC_1/mask/linear_1" 3852 | type: "Convolution" 3853 | bottom: "AttentionC_1/mask/up/interp_1" 3854 | top: "AttentionC_1/mask/linear_1" 3855 | param{ 3856 | lr_mult: 1 3857 | decay_mult: 1 3858 | } 3859 | convolution_param { 3860 | num_output: 1024 3861 | pad: 0 3862 | kernel_size: 1 3863 | stride: 1 3864 | 
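# Mask head: interp_1 up-samples to the size of trunk/res3, then two 1x1
# convolutions (linear_1, linear_2) followed by a Sigmoid squash the
# features into per-position, per-channel attention weights in (0, 1).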
bias_term: false 3865 | } 3866 | } 3867 | 3868 | layer{ 3869 | name: "AttentionC_1/mask/linear_1/bn" 3870 | type: "BN" 3871 | bottom: "AttentionC_1/mask/linear_1" 3872 | top: "AttentionC_1/mask/linear_1/bn" 3873 | bn_param { 3874 | frozen: true 3875 | } 3876 | } 3877 | 3878 | layer{ 3879 | name: "AttentionC_1/mask/linear_1/bn/relu" 3880 | type: "ReLU" 3881 | bottom: "AttentionC_1/mask/linear_1/bn" 3882 | top: "AttentionC_1/mask/linear_1/bn" 3883 | } 3884 | 3885 | layer{ 3886 | name: "AttentionC_1/mask/linear_2" 3887 | type: "Convolution" 3888 | bottom: "AttentionC_1/mask/linear_1/bn" 3889 | top: "AttentionC_1/mask/linear_2" 3890 | param{ 3891 | lr_mult: 1 3892 | decay_mult: 1 3893 | } 3894 | convolution_param { 3895 | num_output: 1024 3896 | pad: 0 3897 | kernel_size: 1 3898 | stride: 1 3899 | bias_term: false 3900 | } 3901 | } 3902 | 3903 | layer{ 3904 | name: "AttentionC_1/mask" 3905 | type: "Sigmoid" 3906 | bottom: "AttentionC_1/mask/linear_2" 3907 | top: "AttentionC_1/mask" 3908 | } 3909 | 3910 | layer{ 3911 | name: "AttentionC_1/attention_residual" 3912 | type: "Eltwise" 3913 | bottom: "AttentionC_1/trunk/res3" 3914 | bottom: "AttentionC_1/mask" 3915 | top: "AttentionC_1/attention_residual" 3916 | eltwise_param { 3917 | operation: PROD 3918 | } 3919 | } 3920 | 3921 | layer{ 3922 | name: "AttentionC_1/fusion" 3923 | type: "Eltwise" 3924 | bottom: "AttentionC_1/attention_residual" 3925 | bottom: "AttentionC_1/trunk/res3" 3926 | top: "AttentionC_1/fusion" 3927 | eltwise_param { 3928 | operation: SUM 3929 | } 3930 | } 3931 | 3932 | layer{ 3933 | name: "AttentionC_1/branch1/conv1_1x1" 3934 | type: "Convolution" 3935 | bottom: "AttentionC_1/fusion" 3936 | top: "AttentionC_1/branch1/conv1_1x1" 3937 | param{ 3938 | lr_mult: 1 3939 | decay_mult: 1 3940 | } 3941 | convolution_param { 3942 | num_output: 512 3943 | pad: 0 3944 | kernel_size: 1 3945 | stride: 1 3946 | bias_term: false 3947 | } 3948 | } 3949 | 3950 | layer{ 3951 | name: "AttentionC_1/branch1/conv1_1x1/bn" 3952 | type: "BN" 3953 | bottom: "AttentionC_1/branch1/conv1_1x1" 3954 | top: "AttentionC_1/branch1/conv1_1x1/bn" 3955 | bn_param { 3956 | frozen: true 3957 | } 3958 | } 3959 | 3960 | layer{ 3961 | name: "AttentionC_1/branch1/conv1_1x1/bn/relu" 3962 | type: "ReLU" 3963 | bottom: "AttentionC_1/branch1/conv1_1x1/bn" 3964 | top: "AttentionC_1/branch1/conv1_1x1/bn" 3965 | } 3966 | 3967 | layer{ 3968 | name: "AttentionC_1/branch1/conv2_3x3" 3969 | type: "Convolution" 3970 | bottom: "AttentionC_1/branch1/conv1_1x1/bn" 3971 | top: "AttentionC_1/branch1/conv2_3x3" 3972 | param{ 3973 | lr_mult: 1 3974 | decay_mult: 1 3975 | } 3976 | convolution_param { 3977 | num_output: 512 3978 | pad: 1 3979 | kernel_size: 3 3980 | stride: 1 3981 | group: 32 3982 | bias_term: false 3983 | } 3984 | } 3985 | 3986 | layer{ 3987 | name: "AttentionC_1/branch1/conv2_3x3/bn" 3988 | type: "BN" 3989 | bottom: "AttentionC_1/branch1/conv2_3x3" 3990 | top: "AttentionC_1/branch1/conv2_3x3/bn" 3991 | bn_param { 3992 | frozen: true 3993 | } 3994 | } 3995 | 3996 | layer{ 3997 | name: "AttentionC_1/branch1/conv2_3x3/bn/relu" 3998 | type: "ReLU" 3999 | bottom: "AttentionC_1/branch1/conv2_3x3/bn" 4000 | top: "AttentionC_1/branch1/conv2_3x3/bn" 4001 | } 4002 | 4003 | layer{ 4004 | name: "AttentionC_1/branch1/conv3_1x1" 4005 | type: "Convolution" 4006 | bottom: "AttentionC_1/branch1/conv2_3x3/bn" 4007 | top: "AttentionC_1/branch1/conv3_1x1" 4008 | param{ 4009 | lr_mult: 1 4010 | decay_mult: 1 4011 | } 4012 | convolution_param { 4013 | num_output: 1024 4014 | pad: 0 4015 | 
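# Attention residual learning: the Eltwise PROD gates the trunk with the
# mask, and the Eltwise SUM adds the trunk back, so the fusion output is
# (1 + M(x)) * T(x); good trunk features survive even where the mask is
# near zero.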
kernel_size: 1 4016 | stride: 1 4017 | bias_term: false 4018 | } 4019 | } 4020 | 4021 | layer{ 4022 | name: "AttentionC_1/branch1/conv3_1x1/bn" 4023 | type: "BN" 4024 | bottom: "AttentionC_1/branch1/conv3_1x1" 4025 | top: "AttentionC_1/branch1/conv3_1x1/bn" 4026 | bn_param { 4027 | frozen: true 4028 | } 4029 | } 4030 | 4031 | layer{ 4032 | name: "AttentionC_1" 4033 | type: "Eltwise" 4034 | bottom: "AttentionC_1/branch1/conv3_1x1/bn" 4035 | bottom: "AttentionC_1/fusion" 4036 | top: "AttentionC_1" 4037 | eltwise_param { 4038 | operation: SUM 4039 | } 4040 | } 4041 | 4042 | layer{ 4043 | name: "AttentionC_1/relu" 4044 | type: "ReLU" 4045 | bottom: "AttentionC_1" 4046 | top: "AttentionC_1" 4047 | } 4048 | 4049 | layer{ 4050 | name: "post_res_4_1/branch1/conv1_1x1" 4051 | type: "Convolution" 4052 | bottom: "AttentionC_1" 4053 | top: "post_res_4_1/branch1/conv1_1x1" 4054 | param{ 4055 | lr_mult: 1 4056 | decay_mult: 1 4057 | } 4058 | convolution_param { 4059 | num_output: 1024 4060 | pad: 0 4061 | kernel_size: 1 4062 | stride: 1 4063 | bias_term: false 4064 | } 4065 | } 4066 | 4067 | layer{ 4068 | name: "post_res_4_1/branch1/conv1_1x1/bn" 4069 | type: "BN" 4070 | bottom: "post_res_4_1/branch1/conv1_1x1" 4071 | top: "post_res_4_1/branch1/conv1_1x1/bn" 4072 | bn_param { 4073 | frozen: true 4074 | } 4075 | } 4076 | 4077 | layer{ 4078 | name: "post_res_4_1/branch1/conv1_1x1/bn/relu" 4079 | type: "ReLU" 4080 | bottom: "post_res_4_1/branch1/conv1_1x1/bn" 4081 | top: "post_res_4_1/branch1/conv1_1x1/bn" 4082 | } 4083 | 4084 | layer{ 4085 | name: "post_res_4_1/branch1/conv2_3x3" 4086 | type: "Convolution" 4087 | bottom: "post_res_4_1/branch1/conv1_1x1/bn" 4088 | top: "post_res_4_1/branch1/conv2_3x3" 4089 | param{ 4090 | lr_mult: 1 4091 | decay_mult: 1 4092 | } 4093 | convolution_param { 4094 | num_output: 1024 4095 | pad: 1 4096 | kernel_size: 3 4097 | stride: 2 4098 | group: 32 4099 | bias_term: false 4100 | } 4101 | } 4102 | 4103 | layer{ 4104 | name: "post_res_4_1/branch1/conv2_3x3/bn" 4105 | type: "BN" 4106 | bottom: "post_res_4_1/branch1/conv2_3x3" 4107 | top: "post_res_4_1/branch1/conv2_3x3/bn" 4108 | bn_param { 4109 | frozen: true 4110 | } 4111 | } 4112 | 4113 | layer{ 4114 | name: "post_res_4_1/branch1/conv2_3x3/bn/relu" 4115 | type: "ReLU" 4116 | bottom: "post_res_4_1/branch1/conv2_3x3/bn" 4117 | top: "post_res_4_1/branch1/conv2_3x3/bn" 4118 | } 4119 | 4120 | layer{ 4121 | name: "post_res_4_1/branch1/conv3_1x1" 4122 | type: "Convolution" 4123 | bottom: "post_res_4_1/branch1/conv2_3x3/bn" 4124 | top: "post_res_4_1/branch1/conv3_1x1" 4125 | param{ 4126 | lr_mult: 1 4127 | decay_mult: 1 4128 | } 4129 | convolution_param { 4130 | num_output: 2048 4131 | pad: 0 4132 | kernel_size: 1 4133 | stride: 1 4134 | bias_term: false 4135 | } 4136 | } 4137 | 4138 | layer{ 4139 | name: "post_res_4_1/branch1/conv3_1x1/bn" 4140 | type: "BN" 4141 | bottom: "post_res_4_1/branch1/conv3_1x1" 4142 | top: "post_res_4_1/branch1/conv3_1x1/bn" 4143 | bn_param { 4144 | frozen: true 4145 | } 4146 | } 4147 | 4148 | layer{ 4149 | name: "post_res_4_1/branch2/conv1_1x1" 4150 | type: "Convolution" 4151 | bottom: "AttentionC_1" 4152 | top: "post_res_4_1/branch2/conv1_1x1" 4153 | param{ 4154 | lr_mult: 1 4155 | decay_mult: 1 4156 | } 4157 | convolution_param { 4158 | num_output: 2048 4159 | pad: 0 4160 | kernel_size: 1 4161 | stride: 2 4162 | bias_term: false 4163 | } 4164 | } 4165 | 4166 | layer{ 4167 | name: "post_res_4_1/branch2/conv1_1x1/bn" 4168 | type: "BN" 4169 | bottom: "post_res_4_1/branch2/conv1_1x1" 4170 | top: 
"post_res_4_1/branch2/conv1_1x1/bn" 4171 | bn_param { 4172 | frozen: true 4173 | } 4174 | } 4175 | 4176 | layer{ 4177 | name: "post_res_4_1" 4178 | type: "Eltwise" 4179 | bottom: "post_res_4_1/branch2/conv1_1x1/bn" 4180 | bottom: "post_res_4_1/branch1/conv3_1x1/bn" 4181 | top: "post_res_4_1" 4182 | eltwise_param { 4183 | operation: SUM 4184 | } 4185 | } 4186 | 4187 | layer{ 4188 | name: "post_res_4_1/relu" 4189 | type: "ReLU" 4190 | bottom: "post_res_4_1" 4191 | top: "post_res_4_1" 4192 | } 4193 | 4194 | layer{ 4195 | name: "post_res_4_2/branch1/conv1_1x1" 4196 | type: "Convolution" 4197 | bottom: "post_res_4_1" 4198 | top: "post_res_4_2/branch1/conv1_1x1" 4199 | param{ 4200 | lr_mult: 1 4201 | decay_mult: 1 4202 | } 4203 | convolution_param { 4204 | num_output: 1024 4205 | pad: 0 4206 | kernel_size: 1 4207 | stride: 1 4208 | bias_term: false 4209 | } 4210 | } 4211 | 4212 | layer{ 4213 | name: "post_res_4_2/branch1/conv1_1x1/bn" 4214 | type: "BN" 4215 | bottom: "post_res_4_2/branch1/conv1_1x1" 4216 | top: "post_res_4_2/branch1/conv1_1x1/bn" 4217 | bn_param { 4218 | frozen: true 4219 | } 4220 | } 4221 | 4222 | layer{ 4223 | name: "post_res_4_2/branch1/conv1_1x1/bn/relu" 4224 | type: "ReLU" 4225 | bottom: "post_res_4_2/branch1/conv1_1x1/bn" 4226 | top: "post_res_4_2/branch1/conv1_1x1/bn" 4227 | } 4228 | 4229 | layer{ 4230 | name: "post_res_4_2/branch1/conv2_3x3" 4231 | type: "Convolution" 4232 | bottom: "post_res_4_2/branch1/conv1_1x1/bn" 4233 | top: "post_res_4_2/branch1/conv2_3x3" 4234 | param{ 4235 | lr_mult: 1 4236 | decay_mult: 1 4237 | } 4238 | convolution_param { 4239 | num_output: 1024 4240 | pad: 1 4241 | kernel_size: 3 4242 | stride: 1 4243 | group: 32 4244 | bias_term: false 4245 | } 4246 | } 4247 | 4248 | layer{ 4249 | name: "post_res_4_2/branch1/conv2_3x3/bn" 4250 | type: "BN" 4251 | bottom: "post_res_4_2/branch1/conv2_3x3" 4252 | top: "post_res_4_2/branch1/conv2_3x3/bn" 4253 | bn_param { 4254 | frozen: true 4255 | } 4256 | } 4257 | 4258 | layer{ 4259 | name: "post_res_4_2/branch1/conv2_3x3/bn/relu" 4260 | type: "ReLU" 4261 | bottom: "post_res_4_2/branch1/conv2_3x3/bn" 4262 | top: "post_res_4_2/branch1/conv2_3x3/bn" 4263 | } 4264 | 4265 | layer{ 4266 | name: "post_res_4_2/branch1/conv3_1x1" 4267 | type: "Convolution" 4268 | bottom: "post_res_4_2/branch1/conv2_3x3/bn" 4269 | top: "post_res_4_2/branch1/conv3_1x1" 4270 | param{ 4271 | lr_mult: 1 4272 | decay_mult: 1 4273 | } 4274 | convolution_param { 4275 | num_output: 2048 4276 | pad: 0 4277 | kernel_size: 1 4278 | stride: 1 4279 | bias_term: false 4280 | } 4281 | } 4282 | 4283 | layer{ 4284 | name: "post_res_4_2/branch1/conv3_1x1/bn" 4285 | type: "BN" 4286 | bottom: "post_res_4_2/branch1/conv3_1x1" 4287 | top: "post_res_4_2/branch1/conv3_1x1/bn" 4288 | bn_param { 4289 | frozen: true 4290 | } 4291 | } 4292 | 4293 | layer{ 4294 | name: "post_res_4_2" 4295 | type: "Eltwise" 4296 | bottom: "post_res_4_2/branch1/conv3_1x1/bn" 4297 | bottom: "post_res_4_1" 4298 | top: "post_res_4_2" 4299 | eltwise_param { 4300 | operation: SUM 4301 | } 4302 | } 4303 | 4304 | layer{ 4305 | name: "post_res_4_2/relu" 4306 | type: "ReLU" 4307 | bottom: "post_res_4_2" 4308 | top: "post_res_4_2" 4309 | } 4310 | 4311 | layer{ 4312 | name: "post_res_4_3/branch1/conv1_1x1" 4313 | type: "Convolution" 4314 | bottom: "post_res_4_2" 4315 | top: "post_res_4_3/branch1/conv1_1x1" 4316 | param{ 4317 | lr_mult: 1 4318 | decay_mult: 1 4319 | } 4320 | convolution_param { 4321 | num_output: 1024 4322 | pad: 0 4323 | kernel_size: 1 4324 | stride: 1 4325 | bias_term: false 
4326 | } 4327 | } 4328 | 4329 | layer{ 4330 | name: "post_res_4_3/branch1/conv1_1x1/bn" 4331 | type: "BN" 4332 | bottom: "post_res_4_3/branch1/conv1_1x1" 4333 | top: "post_res_4_3/branch1/conv1_1x1/bn" 4334 | bn_param { 4335 | frozen: true 4336 | } 4337 | } 4338 | 4339 | layer{ 4340 | name: "post_res_4_3/branch1/conv1_1x1/bn/relu" 4341 | type: "ReLU" 4342 | bottom: "post_res_4_3/branch1/conv1_1x1/bn" 4343 | top: "post_res_4_3/branch1/conv1_1x1/bn" 4344 | } 4345 | 4346 | layer{ 4347 | name: "post_res_4_3/branch1/conv2_3x3" 4348 | type: "Convolution" 4349 | bottom: "post_res_4_3/branch1/conv1_1x1/bn" 4350 | top: "post_res_4_3/branch1/conv2_3x3" 4351 | param{ 4352 | lr_mult: 1 4353 | decay_mult: 1 4354 | } 4355 | convolution_param { 4356 | num_output: 1024 4357 | pad: 1 4358 | kernel_size: 3 4359 | stride: 1 4360 | group: 32 4361 | bias_term: false 4362 | } 4363 | } 4364 | 4365 | layer{ 4366 | name: "post_res_4_3/branch1/conv2_3x3/bn" 4367 | type: "BN" 4368 | bottom: "post_res_4_3/branch1/conv2_3x3" 4369 | top: "post_res_4_3/branch1/conv2_3x3/bn" 4370 | bn_param { 4371 | frozen: true 4372 | } 4373 | } 4374 | 4375 | layer{ 4376 | name: "post_res_4_3/branch1/conv2_3x3/bn/relu" 4377 | type: "ReLU" 4378 | bottom: "post_res_4_3/branch1/conv2_3x3/bn" 4379 | top: "post_res_4_3/branch1/conv2_3x3/bn" 4380 | } 4381 | 4382 | layer{ 4383 | name: "post_res_4_3/branch1/conv3_1x1" 4384 | type: "Convolution" 4385 | bottom: "post_res_4_3/branch1/conv2_3x3/bn" 4386 | top: "post_res_4_3/branch1/conv3_1x1" 4387 | param{ 4388 | lr_mult: 1 4389 | decay_mult: 1 4390 | } 4391 | convolution_param { 4392 | num_output: 2048 4393 | pad: 0 4394 | kernel_size: 1 4395 | stride: 1 4396 | bias_term: false 4397 | } 4398 | } 4399 | 4400 | layer{ 4401 | name: "post_res_4_3/branch1/conv3_1x1/bn" 4402 | type: "BN" 4403 | bottom: "post_res_4_3/branch1/conv3_1x1" 4404 | top: "post_res_4_3/branch1/conv3_1x1/bn" 4405 | bn_param { 4406 | frozen: true 4407 | } 4408 | } 4409 | 4410 | layer{ 4411 | name: "post_res_4_3" 4412 | type: "Eltwise" 4413 | bottom: "post_res_4_3/branch1/conv3_1x1/bn" 4414 | bottom: "post_res_4_2" 4415 | top: "post_res_4_3" 4416 | eltwise_param { 4417 | operation: SUM 4418 | } 4419 | } 4420 | 4421 | layer{ 4422 | name: "post_res_4_3/relu" 4423 | type: "ReLU" 4424 | bottom: "post_res_4_3" 4425 | top: "post_res_4_3" 4426 | } 4427 | 4428 | layer{ 4429 | name: "ave_pool" 4430 | type: "Pooling" 4431 | bottom: "post_res_4_3" 4432 | top: "ave_pool" 4433 | pooling_param { 4434 | pool: AVE 4435 | kernel_size: 7 4436 | stride: 1 4437 | } 4438 | } 4439 | 4440 | layer{ 4441 | name: "classifier" 4442 | type: "InnerProduct" 4443 | bottom: "ave_pool" 4444 | top: "classifier" 4445 | inner_product_param { 4446 | num_output: 1000 4447 | } 4448 | } 4449 | 4450 | layer{ 4451 | name: "cls" 4452 | type: "Softmax" 4453 | bottom: "classifier" 4454 | top: "cls" 4455 | } 4456 | 4457 | --------------------------------------------------------------------------------
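For readers tracing the deploy graph above, every Attention module ends with the same three-layer fusion: a Sigmoid turns the mask branch's last 1x1 convolution into weights in (0, 1), an Eltwise PROD gates the trunk with those weights, and an Eltwise SUM adds the trunk back, giving output = (1 + M(x)) * T(x). A minimal NumPy sketch of that fusion (illustration only, not part of the repository; blob shapes follow Caffe's NCHW layout):

    import numpy as np

    def attention_fusion(trunk, mask_logits):
        """Attention residual fusion as wired in the deploy prototxt:
        Sigmoid -> Eltwise PROD -> Eltwise SUM.

        trunk:       trunk-branch blob, NCHW, e.g. (1, 512, 28, 28)
        mask_logits: output of the mask branch's last 1x1 conv (linear_2)
        """
        mask = 1.0 / (1.0 + np.exp(-mask_logits))    # Sigmoid layer
        attention_residual = trunk * mask            # Eltwise PROD
        return attention_residual + trunk            # Eltwise SUM: (1 + M) * T

    # Toy check: a zero mask logit gives sigmoid 0.5 everywhere,
    # so the fusion scales the trunk by 1.5.
    t = np.ones((1, 512, 28, 28), dtype=np.float32)
    out = attention_fusion(t, np.zeros_like(t))
    assert np.allclose(out, 1.5)

The same pattern appears as AttentionB_1/mask -> AttentionB_1/attention_residual -> AttentionB_1/fusion (512 channels at 28x28 for the 224x224 input) and AttentionC_1/mask -> AttentionC_1/attention_residual -> AttentionC_1/fusion (1024 channels at 14x14).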