├── FoF-TSN.prototxt ├── FoF.prototxt ├── README.md ├── SBN-FDP.prototxt ├── TSN-FDP.prototxt ├── fractalnet-AvgPool.prototxt ├── fractalnet.prototxt ├── freeze_drop_path.cpp ├── freeze_drop_path.cu ├── freeze_drop_path.hpp └── solver.prototxt /FoF.prototxt: -------------------------------------------------------------------------------- 1 | name: "FractalOfFractalNet" 2 | layer { 3 | name: "cifar" 4 | type: "Data" 5 | top: "data" 6 | top: "label" 7 | include { 8 | phase: TRAIN 9 | } 10 | transform_param { 11 | mirror: true 12 | mean_file: "examples/cifar10/mean.binaryproto" 13 | # crop_size: 32 14 | # mean_value: 126 15 | # mean_value: 126 16 | # mean_value: 126 17 | } 18 | data_param { 19 | source: "examples/cifar10/cifar10_train_lmdb" 20 | batch_size: 25 21 | backend: LMDB 22 | } 23 | } 24 | layer { 25 | name: "cifar" 26 | type: "Data" 27 | top: "data" 28 | top: "label" 29 | include { 30 | phase: TEST 31 | } 32 | transform_param { 33 | mean_file: "examples/cifar10/mean.binaryproto" 34 | mirror: false 35 | # shuffle: true 36 | # crop_size: 32 37 | } 38 | data_param { 39 | source: "examples/cifar10/cifar10_test_lmdb" 40 | batch_size: 10 41 | backend: LMDB 42 | } 43 | } 44 | # Input size: 32 45 | layer { 46 | bottom: "data" 47 | top: "conv2_0" 48 | name: "conv2_0" 49 | param { 50 | lr_mult: 1 51 | decay_mult: 1 52 | name: "conv2_0" 53 | } 54 | param { 55 | lr_mult: 2 56 | decay_mult: 0 57 | name: "conv2_0_b" 58 | } 59 | type: "Convolution" 60 | convolution_param { 61 | num_output: 64 62 | pad: 1 63 | kernel_size: 3 64 | weight_filler { 65 | type: "xavier" 66 | } 67 | bias_filler { 68 | type: "constant" 69 | } 70 | } 71 | } 72 | layer { 73 | name: "dropout_conv2_0" 74 | type: "Dropout" 75 | bottom: "conv2_0" 76 | top: "conv2_0" 77 | dropout_param { 78 | dropout_ratio: 0.1 79 | } 80 | } 81 | layer { 82 | name: "batch_conv2_0" 83 | type: "BatchNorm" 84 | bottom: "conv2_0" 85 | top: "conv2_0" 86 | param { lr_mult: 0 } 87 | param { lr_mult: 0 } 88 | param { lr_mult: 0 } 
89 | } 90 | layer { 91 | name: "relu_conv2_0" 92 | type: "ReLU" 93 | bottom: "conv2_0" 94 | top: "conv2_0" 95 | } 96 | layer { 97 | bottom: "conv2_0" 98 | top: "conv2_1" 99 | name: "conv2_1" 100 | param { 101 | lr_mult: 1 102 | decay_mult: 1 103 | name: "conv2_1" 104 | } 105 | param { 106 | lr_mult: 2 107 | decay_mult: 0 108 | name: "conv2_1_b" 109 | } 110 | type: "Convolution" 111 | convolution_param { 112 | num_output: 64 113 | pad: 1 114 | kernel_size: 3 115 | weight_filler { 116 | type: "xavier" 117 | } 118 | bias_filler { 119 | type: "constant" 120 | } 121 | } 122 | } 123 | layer { 124 | name: "dropout_conv2_1" 125 | type: "Dropout" 126 | bottom: "conv2_1" 127 | top: "conv2_1" 128 | dropout_param { 129 | dropout_ratio: 0.1 130 | } 131 | } 132 | layer { 133 | name: "batch_conv2_1" 134 | type: "BatchNorm" 135 | bottom: "conv2_1" 136 | top: "conv2_1" 137 | param { lr_mult: 0 } 138 | param { lr_mult: 0 } 139 | param { lr_mult: 0 } 140 | } 141 | layer { 142 | name: "relu_conv2_1" 143 | type: "ReLU" 144 | bottom: "conv2_1" 145 | top: "conv2_1" 146 | } 147 | layer { 148 | bottom: "data" 149 | top: "conv1_0" 150 | name: "conv1_0" 151 | param { 152 | lr_mult: 1 153 | decay_mult: 1 154 | name: "conv1_0" 155 | } 156 | param { 157 | lr_mult: 2 158 | decay_mult: 0 159 | name: "conv1_0_b" 160 | } 161 | type: "Convolution" 162 | convolution_param { 163 | num_output: 64 164 | pad: 1 165 | kernel_size: 3 166 | weight_filler { 167 | type: "xavier" 168 | } 169 | bias_filler { 170 | type: "constant" 171 | } 172 | } 173 | } 174 | layer { 175 | name: "dropout_conv1_0" 176 | type: "Dropout" 177 | bottom: "conv1_0" 178 | top: "conv1_0" 179 | dropout_param { 180 | dropout_ratio: 0.1 181 | } 182 | } 183 | layer { 184 | name: "batch_conv1_0" 185 | type: "BatchNorm" 186 | bottom: "conv1_0" 187 | top: "conv1_0" 188 | param { lr_mult: 0 } 189 | param { lr_mult: 0 } 190 | param { lr_mult: 0 } 191 | } 192 | layer { 193 | name: "relu_conv1_0" 194 | type: "ReLU" 195 | bottom: "conv1_0" 196 | 
top: "conv1_0" 197 | } 198 | layer { 199 | name: "join_conv2_1_plus" 200 | type: "FractalJoin" 201 | bottom: "conv1_0" 202 | bottom: "conv2_1" 203 | top: "conv2_1_plus" 204 | fractal_join_param { 205 | drop_path_ratio: 0.15 206 | } 207 | } 208 | layer { 209 | bottom: "conv2_1_plus" 210 | top: "conv2_2" 211 | name: "conv2_2" 212 | param { 213 | lr_mult: 1 214 | decay_mult: 1 215 | name: "conv2_2" 216 | } 217 | param { 218 | lr_mult: 2 219 | decay_mult: 0 220 | name: "conv2_2_b" 221 | } 222 | type: "Convolution" 223 | convolution_param { 224 | num_output: 64 225 | pad: 1 226 | kernel_size: 3 227 | weight_filler { 228 | type: "xavier" 229 | } 230 | bias_filler { 231 | type: "constant" 232 | } 233 | } 234 | } 235 | layer { 236 | name: "dropout_conv2_2" 237 | type: "Dropout" 238 | bottom: "conv2_2" 239 | top: "conv2_2" 240 | dropout_param { 241 | dropout_ratio: 0.1 242 | } 243 | } 244 | layer { 245 | name: "batch_conv2_2" 246 | type: "BatchNorm" 247 | bottom: "conv2_2" 248 | top: "conv2_2" 249 | param { lr_mult: 0 } 250 | param { lr_mult: 0 } 251 | param { lr_mult: 0 } 252 | } 253 | layer { 254 | name: "relu_conv2_2" 255 | type: "ReLU" 256 | bottom: "conv2_2" 257 | top: "conv2_2" 258 | } 259 | layer { 260 | bottom: "conv2_2" 261 | top: "conv2_3" 262 | name: "conv2_3" 263 | param { 264 | lr_mult: 1 265 | decay_mult: 1 266 | name: "conv2_3" 267 | } 268 | param { 269 | lr_mult: 2 270 | decay_mult: 0 271 | name: "conv2_3_b" 272 | } 273 | type: "Convolution" 274 | convolution_param { 275 | num_output: 64 276 | pad: 1 277 | kernel_size: 3 278 | weight_filler { 279 | type: "xavier" 280 | } 281 | bias_filler { 282 | type: "constant" 283 | } 284 | } 285 | } 286 | layer { 287 | name: "dropout_conv2_3" 288 | type: "Dropout" 289 | bottom: "conv2_3" 290 | top: "conv2_3" 291 | dropout_param { 292 | dropout_ratio: 0.1 293 | } 294 | } 295 | layer { 296 | name: "batch_conv2_3" 297 | type: "BatchNorm" 298 | bottom: "conv2_3" 299 | top: "conv2_3" 300 | param { lr_mult: 0 } 301 | param { 
lr_mult: 0 } 302 | param { lr_mult: 0 } 303 | } 304 | layer { 305 | name: "relu_conv2_3" 306 | type: "ReLU" 307 | bottom: "conv2_3" 308 | top: "conv2_3" 309 | } 310 | layer { 311 | bottom: "conv2_1_plus" 312 | top: "conv1_1" 313 | name: "conv1_1" 314 | param { 315 | lr_mult: 1 316 | decay_mult: 1 317 | name: "conv1_1" 318 | } 319 | param { 320 | lr_mult: 2 321 | decay_mult: 0 322 | name: "conv1_1_b" 323 | } 324 | type: "Convolution" 325 | convolution_param { 326 | num_output: 64 327 | pad: 1 328 | kernel_size: 3 329 | weight_filler { 330 | type: "xavier" 331 | } 332 | bias_filler { 333 | type: "constant" 334 | } 335 | } 336 | } 337 | layer { 338 | name: "dropout_conv1_1" 339 | type: "Dropout" 340 | bottom: "conv1_1" 341 | top: "conv1_1" 342 | dropout_param { 343 | dropout_ratio: 0.1 344 | } 345 | } 346 | layer { 347 | name: "batch_conv1_1" 348 | type: "BatchNorm" 349 | bottom: "conv1_1" 350 | top: "conv1_1" 351 | param { lr_mult: 0 } 352 | param { lr_mult: 0 } 353 | param { lr_mult: 0 } 354 | } 355 | layer { 356 | name: "relu_conv1_1" 357 | type: "ReLU" 358 | bottom: "conv1_1" 359 | top: "conv1_1" 360 | } 361 | layer { 362 | bottom: "data" 363 | top: "conv0_0" 364 | name: "conv0_0" 365 | param { 366 | lr_mult: 1 367 | decay_mult: 1 368 | name: "conv0_0" 369 | } 370 | param { 371 | lr_mult: 2 372 | decay_mult: 0 373 | name: "conv0_0_b" 374 | } 375 | type: "Convolution" 376 | convolution_param { 377 | num_output: 64 378 | pad: 1 379 | kernel_size: 3 380 | weight_filler { 381 | type: "xavier" 382 | } 383 | bias_filler { 384 | type: "constant" 385 | } 386 | } 387 | } 388 | layer { 389 | name: "dropout_conv0_0" 390 | type: "Dropout" 391 | bottom: "conv0_0" 392 | top: "conv0_0" 393 | dropout_param { 394 | dropout_ratio: 0.1 395 | } 396 | } 397 | layer { 398 | name: "batch_conv0_0" 399 | type: "BatchNorm" 400 | bottom: "conv0_0" 401 | top: "conv0_0" 402 | param { lr_mult: 0 } 403 | param { lr_mult: 0 } 404 | param { lr_mult: 0 } 405 | } 406 | layer { 407 | name: 
"relu_conv0_0" 408 | type: "ReLU" 409 | bottom: "conv0_0" 410 | top: "conv0_0" 411 | } 412 | layer { 413 | bottom: "conv0_0" 414 | top: "pool0_0" 415 | name: "pool0_0" 416 | type: "Pooling" 417 | pooling_param { 418 | pool: MAX 419 | kernel_size: 2 420 | stride: 2 421 | } 422 | } 423 | layer { 424 | bottom: "conv1_1" 425 | top: "pool1_1" 426 | name: "pool1_1" 427 | type: "Pooling" 428 | pooling_param { 429 | pool: MAX 430 | kernel_size: 2 431 | stride: 2 432 | } 433 | } 434 | layer { 435 | bottom: "conv2_3" 436 | top: "pool2_3" 437 | name: "pool2_3" 438 | type: "Pooling" 439 | pooling_param { 440 | pool: MAX 441 | kernel_size: 2 442 | stride: 2 443 | } 444 | } 445 | layer { 446 | name: "join_pool2_3_plus" 447 | type: "FractalJoin" 448 | bottom: "pool0_0" 449 | bottom: "pool1_1" 450 | bottom: "pool2_3" 451 | top: "pool2_3_plus" 452 | fractal_join_param { 453 | drop_path_ratio: 0.15 454 | } 455 | } 456 | # Reduction: 1, spatial size: 16 457 | layer { 458 | bottom: "pool2_3_plus" 459 | top: "conv2_4" 460 | name: "conv2_4" 461 | param { 462 | lr_mult: 1 463 | decay_mult: 1 464 | name: "conv2_4" 465 | } 466 | param { 467 | lr_mult: 2 468 | decay_mult: 0 469 | name: "conv2_4_b" 470 | } 471 | type: "Convolution" 472 | convolution_param { 473 | num_output: 128 474 | pad: 1 475 | kernel_size: 3 476 | weight_filler { 477 | type: "xavier" 478 | } 479 | bias_filler { 480 | type: "constant" 481 | } 482 | } 483 | } 484 | layer { 485 | name: "dropout_conv2_4" 486 | type: "Dropout" 487 | bottom: "conv2_4" 488 | top: "conv2_4" 489 | dropout_param { 490 | dropout_ratio: 0.2 491 | } 492 | } 493 | layer { 494 | name: "batch_conv2_4" 495 | type: "BatchNorm" 496 | bottom: "conv2_4" 497 | top: "conv2_4" 498 | param { lr_mult: 0 } 499 | param { lr_mult: 0 } 500 | param { lr_mult: 0 } 501 | } 502 | layer { 503 | name: "relu_conv2_4" 504 | type: "ReLU" 505 | bottom: "conv2_4" 506 | top: "conv2_4" 507 | } 508 | layer { 509 | bottom: "conv2_4" 510 | top: "conv2_5" 511 | name: "conv2_5" 512 | 
param { 513 | lr_mult: 1 514 | decay_mult: 1 515 | name: "conv2_5" 516 | } 517 | param { 518 | lr_mult: 2 519 | decay_mult: 0 520 | name: "conv2_5_b" 521 | } 522 | type: "Convolution" 523 | convolution_param { 524 | num_output: 128 525 | pad: 1 526 | kernel_size: 3 527 | weight_filler { 528 | type: "xavier" 529 | } 530 | bias_filler { 531 | type: "constant" 532 | } 533 | } 534 | } 535 | layer { 536 | name: "dropout_conv2_5" 537 | type: "Dropout" 538 | bottom: "conv2_5" 539 | top: "conv2_5" 540 | dropout_param { 541 | dropout_ratio: 0.2 542 | } 543 | } 544 | layer { 545 | name: "batch_conv2_5" 546 | type: "BatchNorm" 547 | bottom: "conv2_5" 548 | top: "conv2_5" 549 | param { lr_mult: 0 } 550 | param { lr_mult: 0 } 551 | param { lr_mult: 0 } 552 | } 553 | layer { 554 | name: "relu_conv2_5" 555 | type: "ReLU" 556 | bottom: "conv2_5" 557 | top: "conv2_5" 558 | } 559 | layer { 560 | bottom: "pool2_3_plus" 561 | top: "conv1_2" 562 | name: "conv1_2" 563 | param { 564 | lr_mult: 1 565 | decay_mult: 1 566 | name: "conv1_2" 567 | } 568 | param { 569 | lr_mult: 2 570 | decay_mult: 0 571 | name: "conv1_2_b" 572 | } 573 | type: "Convolution" 574 | convolution_param { 575 | num_output: 128 576 | pad: 1 577 | kernel_size: 3 578 | weight_filler { 579 | type: "xavier" 580 | } 581 | bias_filler { 582 | type: "constant" 583 | } 584 | } 585 | } 586 | layer { 587 | name: "dropout_conv1_2" 588 | type: "Dropout" 589 | bottom: "conv1_2" 590 | top: "conv1_2" 591 | dropout_param { 592 | dropout_ratio: 0.2 593 | } 594 | } 595 | layer { 596 | name: "batch_conv1_2" 597 | type: "BatchNorm" 598 | bottom: "conv1_2" 599 | top: "conv1_2" 600 | param { lr_mult: 0 } 601 | param { lr_mult: 0 } 602 | param { lr_mult: 0 } 603 | } 604 | layer { 605 | name: "relu_conv1_2" 606 | type: "ReLU" 607 | bottom: "conv1_2" 608 | top: "conv1_2" 609 | } 610 | layer { 611 | name: "join_conv2_5_plus" 612 | type: "FractalJoin" 613 | bottom: "conv1_2" 614 | bottom: "conv2_5" 615 | top: "conv2_5_plus" 616 | 
fractal_join_param { 617 | drop_path_ratio: 0.15 618 | } 619 | } 620 | layer { 621 | bottom: "conv2_5_plus" 622 | top: "conv2_6" 623 | name: "conv2_6" 624 | param { 625 | lr_mult: 1 626 | decay_mult: 1 627 | name: "conv2_6" 628 | } 629 | param { 630 | lr_mult: 2 631 | decay_mult: 0 632 | name: "conv2_6_b" 633 | } 634 | type: "Convolution" 635 | convolution_param { 636 | num_output: 128 637 | pad: 1 638 | kernel_size: 3 639 | weight_filler { 640 | type: "xavier" 641 | } 642 | bias_filler { 643 | type: "constant" 644 | } 645 | } 646 | } 647 | layer { 648 | name: "dropout_conv2_6" 649 | type: "Dropout" 650 | bottom: "conv2_6" 651 | top: "conv2_6" 652 | dropout_param { 653 | dropout_ratio: 0.2 654 | } 655 | } 656 | layer { 657 | name: "batch_conv2_6" 658 | type: "BatchNorm" 659 | bottom: "conv2_6" 660 | top: "conv2_6" 661 | param { lr_mult: 0 } 662 | param { lr_mult: 0 } 663 | param { lr_mult: 0 } 664 | } 665 | layer { 666 | name: "relu_conv2_6" 667 | type: "ReLU" 668 | bottom: "conv2_6" 669 | top: "conv2_6" 670 | } 671 | layer { 672 | bottom: "conv2_6" 673 | top: "conv2_7" 674 | name: "conv2_7" 675 | param { 676 | lr_mult: 1 677 | decay_mult: 1 678 | name: "conv2_7" 679 | } 680 | param { 681 | lr_mult: 2 682 | decay_mult: 0 683 | name: "conv2_7_b" 684 | } 685 | type: "Convolution" 686 | convolution_param { 687 | num_output: 128 688 | pad: 1 689 | kernel_size: 3 690 | weight_filler { 691 | type: "xavier" 692 | } 693 | bias_filler { 694 | type: "constant" 695 | } 696 | } 697 | } 698 | layer { 699 | name: "dropout_conv2_7" 700 | type: "Dropout" 701 | bottom: "conv2_7" 702 | top: "conv2_7" 703 | dropout_param { 704 | dropout_ratio: 0.2 705 | } 706 | } 707 | layer { 708 | name: "batch_conv2_7" 709 | type: "BatchNorm" 710 | bottom: "conv2_7" 711 | top: "conv2_7" 712 | param { lr_mult: 0 } 713 | param { lr_mult: 0 } 714 | param { lr_mult: 0 } 715 | } 716 | layer { 717 | name: "relu_conv2_7" 718 | type: "ReLU" 719 | bottom: "conv2_7" 720 | top: "conv2_7" 721 | } 722 | layer { 
723 | bottom: "conv2_5_plus" 724 | top: "conv1_3" 725 | name: "conv1_3" 726 | param { 727 | lr_mult: 1 728 | decay_mult: 1 729 | name: "conv1_3" 730 | } 731 | param { 732 | lr_mult: 2 733 | decay_mult: 0 734 | name: "conv1_3_b" 735 | } 736 | type: "Convolution" 737 | convolution_param { 738 | num_output: 128 739 | pad: 1 740 | kernel_size: 3 741 | weight_filler { 742 | type: "xavier" 743 | } 744 | bias_filler { 745 | type: "constant" 746 | } 747 | } 748 | } 749 | layer { 750 | name: "dropout_conv1_3" 751 | type: "Dropout" 752 | bottom: "conv1_3" 753 | top: "conv1_3" 754 | dropout_param { 755 | dropout_ratio: 0.2 756 | } 757 | } 758 | layer { 759 | name: "batch_conv1_3" 760 | type: "BatchNorm" 761 | bottom: "conv1_3" 762 | top: "conv1_3" 763 | param { lr_mult: 0 } 764 | param { lr_mult: 0 } 765 | param { lr_mult: 0 } 766 | } 767 | layer { 768 | name: "relu_conv1_3" 769 | type: "ReLU" 770 | bottom: "conv1_3" 771 | top: "conv1_3" 772 | } 773 | layer { 774 | bottom: "pool2_3_plus" 775 | top: "conv0_1" 776 | name: "conv0_1" 777 | param { 778 | lr_mult: 1 779 | decay_mult: 1 780 | name: "conv0_1" 781 | } 782 | param { 783 | lr_mult: 2 784 | decay_mult: 0 785 | name: "conv0_1_b" 786 | } 787 | type: "Convolution" 788 | convolution_param { 789 | num_output: 128 790 | pad: 1 791 | kernel_size: 3 792 | weight_filler { 793 | type: "xavier" 794 | } 795 | bias_filler { 796 | type: "constant" 797 | } 798 | } 799 | } 800 | layer { 801 | name: "dropout_conv0_1" 802 | type: "Dropout" 803 | bottom: "conv0_1" 804 | top: "conv0_1" 805 | dropout_param { 806 | dropout_ratio: 0.2 807 | } 808 | } 809 | layer { 810 | name: "batch_conv0_1" 811 | type: "BatchNorm" 812 | bottom: "conv0_1" 813 | top: "conv0_1" 814 | param { lr_mult: 0 } 815 | param { lr_mult: 0 } 816 | param { lr_mult: 0 } 817 | } 818 | layer { 819 | name: "relu_conv0_1" 820 | type: "ReLU" 821 | bottom: "conv0_1" 822 | top: "conv0_1" 823 | } 824 | layer { 825 | bottom: "conv0_1" 826 | top: "pool0_1" 827 | name: "pool0_1" 828 | 
type: "Pooling" 829 | pooling_param { 830 | pool: MAX 831 | kernel_size: 2 832 | stride: 2 833 | } 834 | } 835 | layer { 836 | bottom: "conv1_3" 837 | top: "pool1_3" 838 | name: "pool1_3" 839 | type: "Pooling" 840 | pooling_param { 841 | pool: MAX 842 | kernel_size: 2 843 | stride: 2 844 | } 845 | } 846 | layer { 847 | bottom: "conv2_7" 848 | top: "pool2_7" 849 | name: "pool2_7" 850 | type: "Pooling" 851 | pooling_param { 852 | pool: MAX 853 | kernel_size: 2 854 | stride: 2 855 | } 856 | } 857 | layer { 858 | name: "join_pool2_7_plus" 859 | type: "FractalJoin" 860 | bottom: "pool0_1" 861 | bottom: "pool1_3" 862 | bottom: "pool2_7" 863 | top: "pool2_7_plus" 864 | fractal_join_param { 865 | drop_path_ratio: 0.15 866 | } 867 | } 868 | # Reduction: 2, spatial size: 8 869 | layer { 870 | bottom: "data" 871 | top: "conv2_8" 872 | name: "conv2_8" 873 | param { 874 | lr_mult: 1 875 | decay_mult: 1 876 | name: "conv2_8" 877 | } 878 | param { 879 | lr_mult: 2 880 | decay_mult: 0 881 | name: "conv2_8_b" 882 | } 883 | type: "Convolution" 884 | convolution_param { 885 | num_output: 128 886 | pad: 1 887 | kernel_size: 3 888 | weight_filler { 889 | type: "xavier" 890 | } 891 | bias_filler { 892 | type: "constant" 893 | } 894 | } 895 | } 896 | layer { 897 | name: "dropout_conv2_8" 898 | type: "Dropout" 899 | bottom: "conv2_8" 900 | top: "conv2_8" 901 | dropout_param { 902 | dropout_ratio: 0.1 903 | } 904 | } 905 | layer { 906 | name: "batch_conv2_8" 907 | type: "BatchNorm" 908 | bottom: "conv2_8" 909 | top: "conv2_8" 910 | param { lr_mult: 0 } 911 | param { lr_mult: 0 } 912 | param { lr_mult: 0 } 913 | } 914 | layer { 915 | name: "relu_conv2_8" 916 | type: "ReLU" 917 | bottom: "conv2_8" 918 | top: "conv2_8" 919 | } 920 | layer { 921 | bottom: "conv2_8" 922 | top: "conv2_9" 923 | name: "conv2_9" 924 | param { 925 | lr_mult: 1 926 | decay_mult: 1 927 | name: "conv2_9" 928 | } 929 | param { 930 | lr_mult: 2 931 | decay_mult: 0 932 | name: "conv2_9_b" 933 | } 934 | type: "Convolution" 
935 | convolution_param { 936 | num_output: 128 937 | pad: 1 938 | kernel_size: 3 939 | weight_filler { 940 | type: "xavier" 941 | } 942 | bias_filler { 943 | type: "constant" 944 | } 945 | } 946 | } 947 | layer { 948 | name: "dropout_conv2_9" 949 | type: "Dropout" 950 | bottom: "conv2_9" 951 | top: "conv2_9" 952 | dropout_param { 953 | dropout_ratio: 0.1 954 | } 955 | } 956 | layer { 957 | name: "batch_conv2_9" 958 | type: "BatchNorm" 959 | bottom: "conv2_9" 960 | top: "conv2_9" 961 | param { lr_mult: 0 } 962 | param { lr_mult: 0 } 963 | param { lr_mult: 0 } 964 | } 965 | layer { 966 | name: "relu_conv2_9" 967 | type: "ReLU" 968 | bottom: "conv2_9" 969 | top: "conv2_9" 970 | } 971 | layer { 972 | bottom: "data" 973 | top: "conv1_4" 974 | name: "conv1_4" 975 | param { 976 | lr_mult: 1 977 | decay_mult: 1 978 | name: "conv1_4" 979 | } 980 | param { 981 | lr_mult: 2 982 | decay_mult: 0 983 | name: "conv1_4_b" 984 | } 985 | type: "Convolution" 986 | convolution_param { 987 | num_output: 128 988 | pad: 1 989 | kernel_size: 3 990 | weight_filler { 991 | type: "xavier" 992 | } 993 | bias_filler { 994 | type: "constant" 995 | } 996 | } 997 | } 998 | layer { 999 | name: "dropout_conv1_4" 1000 | type: "Dropout" 1001 | bottom: "conv1_4" 1002 | top: "conv1_4" 1003 | dropout_param { 1004 | dropout_ratio: 0.1 1005 | } 1006 | } 1007 | layer { 1008 | name: "batch_conv1_4" 1009 | type: "BatchNorm" 1010 | bottom: "conv1_4" 1011 | top: "conv1_4" 1012 | param { lr_mult: 0 } 1013 | param { lr_mult: 0 } 1014 | param { lr_mult: 0 } 1015 | } 1016 | layer { 1017 | name: "relu_conv1_4" 1018 | type: "ReLU" 1019 | bottom: "conv1_4" 1020 | top: "conv1_4" 1021 | } 1022 | layer { 1023 | name: "join_conv2_9_plus" 1024 | type: "FractalJoin" 1025 | bottom: "conv1_4" 1026 | bottom: "conv2_9" 1027 | top: "conv2_9_plus" 1028 | fractal_join_param { 1029 | drop_path_ratio: 0.15 1030 | } 1031 | } 1032 | layer { 1033 | bottom: "conv2_9_plus" 1034 | top: "conv2_10" 1035 | name: "conv2_10" 1036 | param { 
1037 | lr_mult: 1 1038 | decay_mult: 1 1039 | name: "conv2_10" 1040 | } 1041 | param { 1042 | lr_mult: 2 1043 | decay_mult: 0 1044 | name: "conv2_10_b" 1045 | } 1046 | type: "Convolution" 1047 | convolution_param { 1048 | num_output: 128 1049 | pad: 1 1050 | kernel_size: 3 1051 | weight_filler { 1052 | type: "xavier" 1053 | } 1054 | bias_filler { 1055 | type: "constant" 1056 | } 1057 | } 1058 | } 1059 | layer { 1060 | name: "dropout_conv2_10" 1061 | type: "Dropout" 1062 | bottom: "conv2_10" 1063 | top: "conv2_10" 1064 | dropout_param { 1065 | dropout_ratio: 0.1 1066 | } 1067 | } 1068 | layer { 1069 | name: "batch_conv2_10" 1070 | type: "BatchNorm" 1071 | bottom: "conv2_10" 1072 | top: "conv2_10" 1073 | param { lr_mult: 0 } 1074 | param { lr_mult: 0 } 1075 | param { lr_mult: 0 } 1076 | } 1077 | layer { 1078 | name: "relu_conv2_10" 1079 | type: "ReLU" 1080 | bottom: "conv2_10" 1081 | top: "conv2_10" 1082 | } 1083 | layer { 1084 | bottom: "conv2_10" 1085 | top: "conv2_11" 1086 | name: "conv2_11" 1087 | param { 1088 | lr_mult: 1 1089 | decay_mult: 1 1090 | name: "conv2_11" 1091 | } 1092 | param { 1093 | lr_mult: 2 1094 | decay_mult: 0 1095 | name: "conv2_11_b" 1096 | } 1097 | type: "Convolution" 1098 | convolution_param { 1099 | num_output: 128 1100 | pad: 1 1101 | kernel_size: 3 1102 | weight_filler { 1103 | type: "xavier" 1104 | } 1105 | bias_filler { 1106 | type: "constant" 1107 | } 1108 | } 1109 | } 1110 | layer { 1111 | name: "dropout_conv2_11" 1112 | type: "Dropout" 1113 | bottom: "conv2_11" 1114 | top: "conv2_11" 1115 | dropout_param { 1116 | dropout_ratio: 0.1 1117 | } 1118 | } 1119 | layer { 1120 | name: "batch_conv2_11" 1121 | type: "BatchNorm" 1122 | bottom: "conv2_11" 1123 | top: "conv2_11" 1124 | param { lr_mult: 0 } 1125 | param { lr_mult: 0 } 1126 | param { lr_mult: 0 } 1127 | } 1128 | layer { 1129 | name: "relu_conv2_11" 1130 | type: "ReLU" 1131 | bottom: "conv2_11" 1132 | top: "conv2_11" 1133 | } 1134 | layer { 1135 | bottom: "conv2_9_plus" 1136 | top: 
"conv1_5" 1137 | name: "conv1_5" 1138 | param { 1139 | lr_mult: 1 1140 | decay_mult: 1 1141 | name: "conv1_5" 1142 | } 1143 | param { 1144 | lr_mult: 2 1145 | decay_mult: 0 1146 | name: "conv1_5_b" 1147 | } 1148 | type: "Convolution" 1149 | convolution_param { 1150 | num_output: 128 1151 | pad: 1 1152 | kernel_size: 3 1153 | weight_filler { 1154 | type: "xavier" 1155 | } 1156 | bias_filler { 1157 | type: "constant" 1158 | } 1159 | } 1160 | } 1161 | layer { 1162 | name: "dropout_conv1_5" 1163 | type: "Dropout" 1164 | bottom: "conv1_5" 1165 | top: "conv1_5" 1166 | dropout_param { 1167 | dropout_ratio: 0.1 1168 | } 1169 | } 1170 | layer { 1171 | name: "batch_conv1_5" 1172 | type: "BatchNorm" 1173 | bottom: "conv1_5" 1174 | top: "conv1_5" 1175 | param { lr_mult: 0 } 1176 | param { lr_mult: 0 } 1177 | param { lr_mult: 0 } 1178 | } 1179 | layer { 1180 | name: "relu_conv1_5" 1181 | type: "ReLU" 1182 | bottom: "conv1_5" 1183 | top: "conv1_5" 1184 | } 1185 | layer { 1186 | bottom: "data" 1187 | top: "conv0_2" 1188 | name: "conv0_2" 1189 | param { 1190 | lr_mult: 1 1191 | decay_mult: 1 1192 | name: "conv0_2" 1193 | } 1194 | param { 1195 | lr_mult: 2 1196 | decay_mult: 0 1197 | name: "conv0_2_b" 1198 | } 1199 | type: "Convolution" 1200 | convolution_param { 1201 | num_output: 128 1202 | pad: 1 1203 | kernel_size: 3 1204 | weight_filler { 1205 | type: "xavier" 1206 | } 1207 | bias_filler { 1208 | type: "constant" 1209 | } 1210 | } 1211 | } 1212 | layer { 1213 | name: "dropout_conv0_2" 1214 | type: "Dropout" 1215 | bottom: "conv0_2" 1216 | top: "conv0_2" 1217 | dropout_param { 1218 | dropout_ratio: 0.1 1219 | } 1220 | } 1221 | layer { 1222 | name: "batch_conv0_2" 1223 | type: "BatchNorm" 1224 | bottom: "conv0_2" 1225 | top: "conv0_2" 1226 | param { lr_mult: 0 } 1227 | param { lr_mult: 0 } 1228 | param { lr_mult: 0 } 1229 | } 1230 | layer { 1231 | name: "relu_conv0_2" 1232 | type: "ReLU" 1233 | bottom: "conv0_2" 1234 | top: "conv0_2" 1235 | } 1236 | layer { 1237 | bottom: 
"conv0_2" 1238 | top: "pool0_2_0" 1239 | name: "pool0_2_0" 1240 | type: "Pooling" 1241 | pooling_param { 1242 | pool: MAX 1243 | kernel_size: 2 1244 | stride: 2 1245 | } 1246 | } 1247 | layer { 1248 | bottom: "pool0_2_0" 1249 | top: "pool0_2_1" 1250 | name: "pool0_2_1" 1251 | type: "Pooling" 1252 | pooling_param { 1253 | pool: MAX 1254 | kernel_size: 2 1255 | stride: 2 1256 | } 1257 | } 1258 | layer { 1259 | bottom: "conv1_5" 1260 | top: "pool1_5_0" 1261 | name: "pool1_5_0" 1262 | type: "Pooling" 1263 | pooling_param { 1264 | pool: MAX 1265 | kernel_size: 2 1266 | stride: 2 1267 | } 1268 | } 1269 | layer { 1270 | bottom: "pool1_5_0" 1271 | top: "pool1_5_1" 1272 | name: "pool1_5_1" 1273 | type: "Pooling" 1274 | pooling_param { 1275 | pool: MAX 1276 | kernel_size: 2 1277 | stride: 2 1278 | } 1279 | } 1280 | layer { 1281 | bottom: "conv2_11" 1282 | top: "pool2_11_0" 1283 | name: "pool2_11_0" 1284 | type: "Pooling" 1285 | pooling_param { 1286 | pool: MAX 1287 | kernel_size: 2 1288 | stride: 2 1289 | } 1290 | } 1291 | layer { 1292 | bottom: "pool2_11_0" 1293 | top: "pool2_11_1" 1294 | name: "pool2_11_1" 1295 | type: "Pooling" 1296 | pooling_param { 1297 | pool: MAX 1298 | kernel_size: 2 1299 | stride: 2 1300 | } 1301 | } 1302 | layer { 1303 | name: "join_pool2_11_plus" 1304 | type: "FractalJoin" 1305 | bottom: "pool0_2_1" 1306 | bottom: "pool1_5_1" 1307 | bottom: "pool2_11_1" 1308 | top: "pool2_11_plus" 1309 | fractal_join_param { 1310 | drop_path_ratio: 0.15 1311 | } 1312 | } 1313 | layer { 1314 | name: "join_extra_mid_join" 1315 | type: "FractalJoin" 1316 | bottom: "pool2_7_plus" 1317 | bottom: "pool2_11_plus" 1318 | top: "extra_mid_join" 1319 | fractal_join_param { 1320 | drop_path_ratio: 0.15 1321 | } 1322 | } 1323 | # Reduction: 3, spatial size: 8 1324 | layer { 1325 | bottom: "extra_mid_join" 1326 | top: "conv2_12" 1327 | name: "conv2_12" 1328 | param { 1329 | lr_mult: 1 1330 | decay_mult: 1 1331 | name: "conv2_12" 1332 | } 1333 | param { 1334 | lr_mult: 2 1335 | 
decay_mult: 0 1336 | name: "conv2_12_b" 1337 | } 1338 | type: "Convolution" 1339 | convolution_param { 1340 | num_output: 256 1341 | pad: 1 1342 | kernel_size: 3 1343 | weight_filler { 1344 | type: "xavier" 1345 | } 1346 | bias_filler { 1347 | type: "constant" 1348 | } 1349 | } 1350 | } 1351 | layer { 1352 | name: "dropout_conv2_12" 1353 | type: "Dropout" 1354 | bottom: "conv2_12" 1355 | top: "conv2_12" 1356 | dropout_param { 1357 | dropout_ratio: 0.0 1358 | } 1359 | } 1360 | layer { 1361 | name: "batch_conv2_12" 1362 | type: "BatchNorm" 1363 | bottom: "conv2_12" 1364 | top: "conv2_12" 1365 | param { lr_mult: 0 } 1366 | param { lr_mult: 0 } 1367 | param { lr_mult: 0 } 1368 | } 1369 | layer { 1370 | name: "relu_conv2_12" 1371 | type: "ReLU" 1372 | bottom: "conv2_12" 1373 | top: "conv2_12" 1374 | } 1375 | layer { 1376 | bottom: "conv2_12" 1377 | top: "conv2_13" 1378 | name: "conv2_13" 1379 | param { 1380 | lr_mult: 1 1381 | decay_mult: 1 1382 | name: "conv2_13" 1383 | } 1384 | param { 1385 | lr_mult: 2 1386 | decay_mult: 0 1387 | name: "conv2_13_b" 1388 | } 1389 | type: "Convolution" 1390 | convolution_param { 1391 | num_output: 256 1392 | pad: 1 1393 | kernel_size: 3 1394 | weight_filler { 1395 | type: "xavier" 1396 | } 1397 | bias_filler { 1398 | type: "constant" 1399 | } 1400 | } 1401 | } 1402 | layer { 1403 | name: "dropout_conv2_13" 1404 | type: "Dropout" 1405 | bottom: "conv2_13" 1406 | top: "conv2_13" 1407 | dropout_param { 1408 | dropout_ratio: 0.0 1409 | } 1410 | } 1411 | layer { 1412 | name: "batch_conv2_13" 1413 | type: "BatchNorm" 1414 | bottom: "conv2_13" 1415 | top: "conv2_13" 1416 | param { lr_mult: 0 } 1417 | param { lr_mult: 0 } 1418 | param { lr_mult: 0 } 1419 | } 1420 | layer { 1421 | name: "relu_conv2_13" 1422 | type: "ReLU" 1423 | bottom: "conv2_13" 1424 | top: "conv2_13" 1425 | } 1426 | layer { 1427 | bottom: "extra_mid_join" 1428 | top: "conv1_6" 1429 | name: "conv1_6" 1430 | param { 1431 | lr_mult: 1 1432 | decay_mult: 1 1433 | name: "conv1_6" 
1434 | } 1435 | param { 1436 | lr_mult: 2 1437 | decay_mult: 0 1438 | name: "conv1_6_b" 1439 | } 1440 | type: "Convolution" 1441 | convolution_param { 1442 | num_output: 256 1443 | pad: 1 1444 | kernel_size: 3 1445 | weight_filler { 1446 | type: "xavier" 1447 | } 1448 | bias_filler { 1449 | type: "constant" 1450 | } 1451 | } 1452 | } 1453 | layer { 1454 | name: "dropout_conv1_6" 1455 | type: "Dropout" 1456 | bottom: "conv1_6" 1457 | top: "conv1_6" 1458 | dropout_param { 1459 | dropout_ratio: 0.3 1460 | } 1461 | } 1462 | layer { 1463 | name: "batch_conv1_6" 1464 | type: "BatchNorm" 1465 | bottom: "conv1_6" 1466 | top: "conv1_6" 1467 | param { lr_mult: 0 } 1468 | param { lr_mult: 0 } 1469 | param { lr_mult: 0 } 1470 | } 1471 | layer { 1472 | name: "relu_conv1_6" 1473 | type: "ReLU" 1474 | bottom: "conv1_6" 1475 | top: "conv1_6" 1476 | } 1477 | layer { 1478 | name: "join_conv2_13_plus" 1479 | type: "FractalJoin" 1480 | bottom: "conv1_6" 1481 | bottom: "conv2_13" 1482 | top: "conv2_13_plus" 1483 | fractal_join_param { 1484 | drop_path_ratio: 0.15 1485 | } 1486 | } 1487 | layer { 1488 | bottom: "conv2_13_plus" 1489 | top: "conv2_14" 1490 | name: "conv2_14" 1491 | param { 1492 | lr_mult: 1 1493 | decay_mult: 1 1494 | name: "conv2_14" 1495 | } 1496 | param { 1497 | lr_mult: 2 1498 | decay_mult: 0 1499 | name: "conv2_14_b" 1500 | } 1501 | type: "Convolution" 1502 | convolution_param { 1503 | num_output: 256 1504 | pad: 1 1505 | kernel_size: 3 1506 | weight_filler { 1507 | type: "xavier" 1508 | } 1509 | bias_filler { 1510 | type: "constant" 1511 | } 1512 | } 1513 | } 1514 | layer { 1515 | name: "dropout_conv2_14" 1516 | type: "Dropout" 1517 | bottom: "conv2_14" 1518 | top: "conv2_14" 1519 | dropout_param { 1520 | dropout_ratio: 0.3 1521 | } 1522 | } 1523 | layer { 1524 | name: "batch_conv2_14" 1525 | type: "BatchNorm" 1526 | bottom: "conv2_14" 1527 | top: "conv2_14" 1528 | param { lr_mult: 0 } 1529 | param { lr_mult: 0 } 1530 | param { lr_mult: 0 } 1531 | } 1532 | layer { 
1533 | name: "relu_conv2_14" 1534 | type: "ReLU" 1535 | bottom: "conv2_14" 1536 | top: "conv2_14" 1537 | } 1538 | layer { 1539 | bottom: "conv2_14" 1540 | top: "conv2_15" 1541 | name: "conv2_15" 1542 | param { 1543 | lr_mult: 1 1544 | decay_mult: 1 1545 | name: "conv2_15" 1546 | } 1547 | param { 1548 | lr_mult: 2 1549 | decay_mult: 0 1550 | name: "conv2_15_b" 1551 | } 1552 | type: "Convolution" 1553 | convolution_param { 1554 | num_output: 256 1555 | pad: 1 1556 | kernel_size: 3 1557 | weight_filler { 1558 | type: "xavier" 1559 | } 1560 | bias_filler { 1561 | type: "constant" 1562 | } 1563 | } 1564 | } 1565 | layer { 1566 | name: "dropout_conv2_15" 1567 | type: "Dropout" 1568 | bottom: "conv2_15" 1569 | top: "conv2_15" 1570 | dropout_param { 1571 | dropout_ratio: 0.3 1572 | } 1573 | } 1574 | layer { 1575 | name: "batch_conv2_15" 1576 | type: "BatchNorm" 1577 | bottom: "conv2_15" 1578 | top: "conv2_15" 1579 | param { lr_mult: 0 } 1580 | param { lr_mult: 0 } 1581 | param { lr_mult: 0 } 1582 | } 1583 | layer { 1584 | name: "relu_conv2_15" 1585 | type: "ReLU" 1586 | bottom: "conv2_15" 1587 | top: "conv2_15" 1588 | } 1589 | layer { 1590 | bottom: "conv2_13_plus" 1591 | top: "conv1_7" 1592 | name: "conv1_7" 1593 | param { 1594 | lr_mult: 1 1595 | decay_mult: 1 1596 | name: "conv1_7" 1597 | } 1598 | param { 1599 | lr_mult: 2 1600 | decay_mult: 0 1601 | name: "conv1_7_b" 1602 | } 1603 | type: "Convolution" 1604 | convolution_param { 1605 | num_output: 256 1606 | pad: 1 1607 | kernel_size: 3 1608 | weight_filler { 1609 | type: "xavier" 1610 | } 1611 | bias_filler { 1612 | type: "constant" 1613 | } 1614 | } 1615 | } 1616 | layer { 1617 | name: "dropout_conv1_7" 1618 | type: "Dropout" 1619 | bottom: "conv1_7" 1620 | top: "conv1_7" 1621 | dropout_param { 1622 | dropout_ratio: 0.3 1623 | } 1624 | } 1625 | layer { 1626 | name: "batch_conv1_7" 1627 | type: "BatchNorm" 1628 | bottom: "conv1_7" 1629 | top: "conv1_7" 1630 | param { lr_mult: 0 } 1631 | param { lr_mult: 0 } 1632 | 
param { lr_mult: 0 } 1633 | } 1634 | layer { 1635 | name: "relu_conv1_7" 1636 | type: "ReLU" 1637 | bottom: "conv1_7" 1638 | top: "conv1_7" 1639 | } 1640 | layer { 1641 | bottom: "extra_mid_join" 1642 | top: "conv0_3" 1643 | name: "conv0_3" 1644 | param { 1645 | lr_mult: 1 1646 | decay_mult: 1 1647 | name: "conv0_3" 1648 | } 1649 | param { 1650 | lr_mult: 2 1651 | decay_mult: 0 1652 | name: "conv0_3_b" 1653 | } 1654 | type: "Convolution" 1655 | convolution_param { 1656 | num_output: 256 1657 | pad: 1 1658 | kernel_size: 3 1659 | weight_filler { 1660 | type: "xavier" 1661 | } 1662 | bias_filler { 1663 | type: "constant" 1664 | } 1665 | } 1666 | } 1667 | layer { 1668 | name: "dropout_conv0_3" 1669 | type: "Dropout" 1670 | bottom: "conv0_3" 1671 | top: "conv0_3" 1672 | dropout_param { 1673 | dropout_ratio: 0.3 1674 | } 1675 | } 1676 | layer { 1677 | name: "batch_conv0_3" 1678 | type: "BatchNorm" 1679 | bottom: "conv0_3" 1680 | top: "conv0_3" 1681 | param { lr_mult: 0 } 1682 | param { lr_mult: 0 } 1683 | param { lr_mult: 0 } 1684 | } 1685 | layer { 1686 | name: "relu_conv0_3" 1687 | type: "ReLU" 1688 | bottom: "conv0_3" 1689 | top: "conv0_3" 1690 | } 1691 | layer { 1692 | bottom: "conv0_3" 1693 | top: "pool0_3" 1694 | name: "pool0_3" 1695 | type: "Pooling" 1696 | pooling_param { 1697 | pool: MAX 1698 | kernel_size: 2 1699 | stride: 2 1700 | } 1701 | } 1702 | layer { 1703 | bottom: "conv1_7" 1704 | top: "pool1_7" 1705 | name: "pool1_7" 1706 | type: "Pooling" 1707 | pooling_param { 1708 | pool: MAX 1709 | kernel_size: 2 1710 | stride: 2 1711 | } 1712 | } 1713 | layer { 1714 | bottom: "conv2_15" 1715 | top: "pool2_15" 1716 | name: "pool2_15" 1717 | type: "Pooling" 1718 | pooling_param { 1719 | pool: MAX 1720 | kernel_size: 2 1721 | stride: 2 1722 | } 1723 | } 1724 | layer { 1725 | name: "join_pool2_15_plus" 1726 | type: "FractalJoin" 1727 | bottom: "pool0_3" 1728 | bottom: "pool1_7" 1729 | bottom: "pool2_15" 1730 | top: "pool2_15_plus" 1731 | fractal_join_param { 1732 | 
drop_path_ratio: 0.15 1733 | } 1734 | } 1735 | # Reduction: 4, spatial size: 2 1736 | layer { 1737 | bottom: "pool2_15_plus" 1738 | top: "conv2_16" 1739 | name: "conv2_16" 1740 | param { 1741 | lr_mult: 1 1742 | decay_mult: 1 1743 | name: "conv2_16" 1744 | } 1745 | param { 1746 | lr_mult: 2 1747 | decay_mult: 0 1748 | name: "conv2_16_b" 1749 | } 1750 | type: "Convolution" 1751 | convolution_param { 1752 | num_output: 512 1753 | pad: 1 1754 | kernel_size: 3 1755 | weight_filler { 1756 | type: "xavier" 1757 | } 1758 | bias_filler { 1759 | type: "constant" 1760 | } 1761 | } 1762 | } 1763 | layer { 1764 | name: "dropout_conv2_16" 1765 | type: "Dropout" 1766 | bottom: "conv2_16" 1767 | top: "conv2_16" 1768 | dropout_param { 1769 | dropout_ratio: 0.3 1770 | } 1771 | } 1772 | layer { 1773 | name: "batch_conv2_16" 1774 | type: "BatchNorm" 1775 | bottom: "conv2_16" 1776 | top: "conv2_16" 1777 | param { lr_mult: 0 } 1778 | param { lr_mult: 0 } 1779 | param { lr_mult: 0 } 1780 | } 1781 | layer { 1782 | name: "relu_conv2_16" 1783 | type: "ReLU" 1784 | bottom: "conv2_16" 1785 | top: "conv2_16" 1786 | } 1787 | layer { 1788 | bottom: "conv2_16" 1789 | top: "conv2_17" 1790 | name: "conv2_17" 1791 | param { 1792 | lr_mult: 1 1793 | decay_mult: 1 1794 | name: "conv2_17" 1795 | } 1796 | param { 1797 | lr_mult: 2 1798 | decay_mult: 0 1799 | name: "conv2_17_b" 1800 | } 1801 | type: "Convolution" 1802 | convolution_param { 1803 | num_output: 512 1804 | pad: 1 1805 | kernel_size: 3 1806 | weight_filler { 1807 | type: "xavier" 1808 | } 1809 | bias_filler { 1810 | type: "constant" 1811 | } 1812 | } 1813 | } 1814 | layer { 1815 | name: "dropout_conv2_17" 1816 | type: "Dropout" 1817 | bottom: "conv2_17" 1818 | top: "conv2_17" 1819 | dropout_param { 1820 | dropout_ratio: 0.3 1821 | } 1822 | } 1823 | layer { 1824 | name: "batch_conv2_17" 1825 | type: "BatchNorm" 1826 | bottom: "conv2_17" 1827 | top: "conv2_17" 1828 | param { lr_mult: 0 } 1829 | param { lr_mult: 0 } 1830 | param { lr_mult: 0 } 
1831 | } 1832 | layer { 1833 | name: "relu_conv2_17" 1834 | type: "ReLU" 1835 | bottom: "conv2_17" 1836 | top: "conv2_17" 1837 | } 1838 | layer { 1839 | bottom: "pool2_15_plus" 1840 | top: "conv1_8" 1841 | name: "conv1_8" 1842 | param { 1843 | lr_mult: 1 1844 | decay_mult: 1 1845 | name: "conv1_8" 1846 | } 1847 | param { 1848 | lr_mult: 2 1849 | decay_mult: 0 1850 | name: "conv1_8_b" 1851 | } 1852 | type: "Convolution" 1853 | convolution_param { 1854 | num_output: 512 1855 | pad: 1 1856 | kernel_size: 3 1857 | weight_filler { 1858 | type: "xavier" 1859 | } 1860 | bias_filler { 1861 | type: "constant" 1862 | } 1863 | } 1864 | } 1865 | layer { 1866 | name: "dropout_conv1_8" 1867 | type: "Dropout" 1868 | bottom: "conv1_8" 1869 | top: "conv1_8" 1870 | dropout_param { 1871 | dropout_ratio: 0.3 1872 | } 1873 | } 1874 | layer { 1875 | name: "batch_conv1_8" 1876 | type: "BatchNorm" 1877 | bottom: "conv1_8" 1878 | top: "conv1_8" 1879 | param { lr_mult: 0 } 1880 | param { lr_mult: 0 } 1881 | param { lr_mult: 0 } 1882 | } 1883 | layer { 1884 | name: "relu_conv1_8" 1885 | type: "ReLU" 1886 | bottom: "conv1_8" 1887 | top: "conv1_8" 1888 | } 1889 | layer { 1890 | name: "join_conv2_17_plus" 1891 | type: "FractalJoin" 1892 | bottom: "conv1_8" 1893 | bottom: "conv2_17" 1894 | top: "conv2_17_plus" 1895 | fractal_join_param { 1896 | drop_path_ratio: 0.15 1897 | } 1898 | } 1899 | layer { 1900 | bottom: "conv2_17_plus" 1901 | top: "conv2_18" 1902 | name: "conv2_18" 1903 | param { 1904 | lr_mult: 1 1905 | decay_mult: 1 1906 | name: "conv2_18" 1907 | } 1908 | param { 1909 | lr_mult: 2 1910 | decay_mult: 0 1911 | name: "conv2_18_b" 1912 | } 1913 | type: "Convolution" 1914 | convolution_param { 1915 | num_output: 512 1916 | pad: 1 1917 | kernel_size: 3 1918 | weight_filler { 1919 | type: "xavier" 1920 | } 1921 | bias_filler { 1922 | type: "constant" 1923 | } 1924 | } 1925 | } 1926 | layer { 1927 | name: "dropout_conv2_18" 1928 | type: "Dropout" 1929 | bottom: "conv2_18" 1930 | top: 
"conv2_18" 1931 | dropout_param { 1932 | dropout_ratio: 0.3 1933 | } 1934 | } 1935 | layer { 1936 | name: "batch_conv2_18" 1937 | type: "BatchNorm" 1938 | bottom: "conv2_18" 1939 | top: "conv2_18" 1940 | param { lr_mult: 0 } 1941 | param { lr_mult: 0 } 1942 | param { lr_mult: 0 } 1943 | } 1944 | layer { 1945 | name: "relu_conv2_18" 1946 | type: "ReLU" 1947 | bottom: "conv2_18" 1948 | top: "conv2_18" 1949 | } 1950 | layer { 1951 | bottom: "conv2_18" 1952 | top: "conv2_19" 1953 | name: "conv2_19" 1954 | param { 1955 | lr_mult: 1 1956 | decay_mult: 1 1957 | name: "conv2_19" 1958 | } 1959 | param { 1960 | lr_mult: 2 1961 | decay_mult: 0 1962 | name: "conv2_19_b" 1963 | } 1964 | type: "Convolution" 1965 | convolution_param { 1966 | num_output: 512 1967 | pad: 1 1968 | kernel_size: 3 1969 | weight_filler { 1970 | type: "xavier" 1971 | } 1972 | bias_filler { 1973 | type: "constant" 1974 | } 1975 | } 1976 | } 1977 | layer { 1978 | name: "dropout_conv2_19" 1979 | type: "Dropout" 1980 | bottom: "conv2_19" 1981 | top: "conv2_19" 1982 | dropout_param { 1983 | dropout_ratio: 0.3 1984 | } 1985 | } 1986 | layer { 1987 | name: "batch_conv2_19" 1988 | type: "BatchNorm" 1989 | bottom: "conv2_19" 1990 | top: "conv2_19" 1991 | param { lr_mult: 0 } 1992 | param { lr_mult: 0 } 1993 | param { lr_mult: 0 } 1994 | } 1995 | layer { 1996 | name: "relu_conv2_19" 1997 | type: "ReLU" 1998 | bottom: "conv2_19" 1999 | top: "conv2_19" 2000 | } 2001 | layer { 2002 | bottom: "conv2_17_plus" 2003 | top: "conv1_9" 2004 | name: "conv1_9" 2005 | param { 2006 | lr_mult: 1 2007 | decay_mult: 1 2008 | name: "conv1_9" 2009 | } 2010 | param { 2011 | lr_mult: 2 2012 | decay_mult: 0 2013 | name: "conv1_9_b" 2014 | } 2015 | type: "Convolution" 2016 | convolution_param { 2017 | num_output: 512 2018 | pad: 1 2019 | kernel_size: 3 2020 | weight_filler { 2021 | type: "xavier" 2022 | } 2023 | bias_filler { 2024 | type: "constant" 2025 | } 2026 | } 2027 | } 2028 | layer { 2029 | name: "dropout_conv1_9" 2030 | type: 
"Dropout" 2031 | bottom: "conv1_9" 2032 | top: "conv1_9" 2033 | dropout_param { 2034 | dropout_ratio: 0.3 2035 | } 2036 | } 2037 | layer { 2038 | name: "batch_conv1_9" 2039 | type: "BatchNorm" 2040 | bottom: "conv1_9" 2041 | top: "conv1_9" 2042 | param { lr_mult: 0 } 2043 | param { lr_mult: 0 } 2044 | param { lr_mult: 0 } 2045 | } 2046 | layer { 2047 | name: "relu_conv1_9" 2048 | type: "ReLU" 2049 | bottom: "conv1_9" 2050 | top: "conv1_9" 2051 | } 2052 | layer { 2053 | bottom: "pool2_15_plus" 2054 | top: "conv0_4" 2055 | name: "conv0_4" 2056 | param { 2057 | lr_mult: 1 2058 | decay_mult: 1 2059 | name: "conv0_4" 2060 | } 2061 | param { 2062 | lr_mult: 2 2063 | decay_mult: 0 2064 | name: "conv0_4_b" 2065 | } 2066 | type: "Convolution" 2067 | convolution_param { 2068 | num_output: 512 2069 | pad: 1 2070 | kernel_size: 3 2071 | weight_filler { 2072 | type: "xavier" 2073 | } 2074 | bias_filler { 2075 | type: "constant" 2076 | } 2077 | } 2078 | } 2079 | layer { 2080 | name: "dropout_conv0_4" 2081 | type: "Dropout" 2082 | bottom: "conv0_4" 2083 | top: "conv0_4" 2084 | dropout_param { 2085 | dropout_ratio: 0.3 2086 | } 2087 | } 2088 | layer { 2089 | name: "batch_conv0_4" 2090 | type: "BatchNorm" 2091 | bottom: "conv0_4" 2092 | top: "conv0_4" 2093 | param { lr_mult: 0 } 2094 | param { lr_mult: 0 } 2095 | param { lr_mult: 0 } 2096 | } 2097 | layer { 2098 | name: "relu_conv0_4" 2099 | type: "ReLU" 2100 | bottom: "conv0_4" 2101 | top: "conv0_4" 2102 | } 2103 | layer { 2104 | bottom: "conv0_4" 2105 | top: "pool0_4" 2106 | name: "pool0_4" 2107 | type: "Pooling" 2108 | pooling_param { 2109 | pool: MAX 2110 | kernel_size: 2 2111 | stride: 2 2112 | } 2113 | } 2114 | layer { 2115 | bottom: "conv1_9" 2116 | top: "pool1_9" 2117 | name: "pool1_9" 2118 | type: "Pooling" 2119 | pooling_param { 2120 | pool: MAX 2121 | kernel_size: 2 2122 | stride: 2 2123 | } 2124 | } 2125 | layer { 2126 | bottom: "conv2_19" 2127 | top: "pool2_19" 2128 | name: "pool2_19" 2129 | type: "Pooling" 2130 | 
pooling_param { 2131 | pool: MAX 2132 | kernel_size: 2 2133 | stride: 2 2134 | } 2135 | } 2136 | layer { 2137 | name: "join_pool2_19_plus" 2138 | type: "FractalJoin" 2139 | bottom: "pool0_4" 2140 | bottom: "pool1_9" 2141 | bottom: "pool2_19" 2142 | top: "pool2_19_plus" 2143 | fractal_join_param { 2144 | drop_path_ratio: 0.15 2145 | } 2146 | } 2147 | # Reduction: 5, spatial size: 1 2148 | layer { 2149 | bottom: "extra_mid_join" 2150 | top: "conv2_20" 2151 | name: "conv2_20" 2152 | param { 2153 | lr_mult: 1 2154 | decay_mult: 1 2155 | name: "conv2_20" 2156 | } 2157 | param { 2158 | lr_mult: 2 2159 | decay_mult: 0 2160 | name: "conv2_20_b" 2161 | } 2162 | type: "Convolution" 2163 | convolution_param { 2164 | num_output: 512 2165 | pad: 1 2166 | kernel_size: 3 2167 | weight_filler { 2168 | type: "xavier" 2169 | } 2170 | bias_filler { 2171 | type: "constant" 2172 | } 2173 | } 2174 | } 2175 | layer { 2176 | name: "dropout_conv2_20" 2177 | type: "Dropout" 2178 | bottom: "conv2_20" 2179 | top: "conv2_20" 2180 | dropout_param { 2181 | dropout_ratio: 0.3 2182 | } 2183 | } 2184 | layer { 2185 | name: "batch_conv2_20" 2186 | type: "BatchNorm" 2187 | bottom: "conv2_20" 2188 | top: "conv2_20" 2189 | param { lr_mult: 0 } 2190 | param { lr_mult: 0 } 2191 | param { lr_mult: 0 } 2192 | } 2193 | layer { 2194 | name: "relu_conv2_20" 2195 | type: "ReLU" 2196 | bottom: "conv2_20" 2197 | top: "conv2_20" 2198 | } 2199 | layer { 2200 | bottom: "conv2_20" 2201 | top: "conv2_21" 2202 | name: "conv2_21" 2203 | param { 2204 | lr_mult: 1 2205 | decay_mult: 1 2206 | name: "conv2_21" 2207 | } 2208 | param { 2209 | lr_mult: 2 2210 | decay_mult: 0 2211 | name: "conv2_21_b" 2212 | } 2213 | type: "Convolution" 2214 | convolution_param { 2215 | num_output: 512 2216 | pad: 1 2217 | kernel_size: 3 2218 | weight_filler { 2219 | type: "xavier" 2220 | } 2221 | bias_filler { 2222 | type: "constant" 2223 | } 2224 | } 2225 | } 2226 | layer { 2227 | name: "dropout_conv2_21" 2228 | type: "Dropout" 2229 | 
bottom: "conv2_21" 2230 | top: "conv2_21" 2231 | dropout_param { 2232 | dropout_ratio: 0.3 2233 | } 2234 | } 2235 | layer { 2236 | name: "batch_conv2_21" 2237 | type: "BatchNorm" 2238 | bottom: "conv2_21" 2239 | top: "conv2_21" 2240 | param { lr_mult: 0 } 2241 | param { lr_mult: 0 } 2242 | param { lr_mult: 0 } 2243 | } 2244 | layer { 2245 | name: "relu_conv2_21" 2246 | type: "ReLU" 2247 | bottom: "conv2_21" 2248 | top: "conv2_21" 2249 | } 2250 | layer { 2251 | bottom: "extra_mid_join" 2252 | top: "conv1_10" 2253 | name: "conv1_10" 2254 | param { 2255 | lr_mult: 1 2256 | decay_mult: 1 2257 | name: "conv1_10" 2258 | } 2259 | param { 2260 | lr_mult: 2 2261 | decay_mult: 0 2262 | name: "conv1_10_b" 2263 | } 2264 | type: "Convolution" 2265 | convolution_param { 2266 | num_output: 512 2267 | pad: 1 2268 | kernel_size: 3 2269 | weight_filler { 2270 | type: "xavier" 2271 | } 2272 | bias_filler { 2273 | type: "constant" 2274 | } 2275 | } 2276 | } 2277 | layer { 2278 | name: "dropout_conv1_10" 2279 | type: "Dropout" 2280 | bottom: "conv1_10" 2281 | top: "conv1_10" 2282 | dropout_param { 2283 | dropout_ratio: 0.3 2284 | } 2285 | } 2286 | layer { 2287 | name: "batch_conv1_10" 2288 | type: "BatchNorm" 2289 | bottom: "conv1_10" 2290 | top: "conv1_10" 2291 | param { lr_mult: 0 } 2292 | param { lr_mult: 0 } 2293 | param { lr_mult: 0 } 2294 | } 2295 | layer { 2296 | name: "relu_conv1_10" 2297 | type: "ReLU" 2298 | bottom: "conv1_10" 2299 | top: "conv1_10" 2300 | } 2301 | layer { 2302 | name: "join_conv2_21_plus" 2303 | type: "FractalJoin" 2304 | bottom: "conv1_10" 2305 | bottom: "conv2_21" 2306 | top: "conv2_21_plus" 2307 | fractal_join_param { 2308 | drop_path_ratio: 0.15 2309 | } 2310 | } 2311 | layer { 2312 | bottom: "conv2_21_plus" 2313 | top: "conv2_22" 2314 | name: "conv2_22" 2315 | param { 2316 | lr_mult: 1 2317 | decay_mult: 1 2318 | name: "conv2_22" 2319 | } 2320 | param { 2321 | lr_mult: 2 2322 | decay_mult: 0 2323 | name: "conv2_22_b" 2324 | } 2325 | type: "Convolution" 
2326 | convolution_param { 2327 | num_output: 512 2328 | pad: 1 2329 | kernel_size: 3 2330 | weight_filler { 2331 | type: "xavier" 2332 | } 2333 | bias_filler { 2334 | type: "constant" 2335 | } 2336 | } 2337 | } 2338 | layer { 2339 | name: "dropout_conv2_22" 2340 | type: "Dropout" 2341 | bottom: "conv2_22" 2342 | top: "conv2_22" 2343 | dropout_param { 2344 | dropout_ratio: 0.3 2345 | } 2346 | } 2347 | layer { 2348 | name: "batch_conv2_22" 2349 | type: "BatchNorm" 2350 | bottom: "conv2_22" 2351 | top: "conv2_22" 2352 | param { lr_mult: 0 } 2353 | param { lr_mult: 0 } 2354 | param { lr_mult: 0 } 2355 | } 2356 | layer { 2357 | name: "relu_conv2_22" 2358 | type: "ReLU" 2359 | bottom: "conv2_22" 2360 | top: "conv2_22" 2361 | } 2362 | layer { 2363 | bottom: "conv2_22" 2364 | top: "conv2_23" 2365 | name: "conv2_23" 2366 | param { 2367 | lr_mult: 1 2368 | decay_mult: 1 2369 | name: "conv2_23" 2370 | } 2371 | param { 2372 | lr_mult: 2 2373 | decay_mult: 0 2374 | name: "conv2_23_b" 2375 | } 2376 | type: "Convolution" 2377 | convolution_param { 2378 | num_output: 512 2379 | pad: 1 2380 | kernel_size: 3 2381 | weight_filler { 2382 | type: "xavier" 2383 | } 2384 | bias_filler { 2385 | type: "constant" 2386 | } 2387 | } 2388 | } 2389 | layer { 2390 | name: "dropout_conv2_23" 2391 | type: "Dropout" 2392 | bottom: "conv2_23" 2393 | top: "conv2_23" 2394 | dropout_param { 2395 | dropout_ratio: 0.3 2396 | } 2397 | } 2398 | layer { 2399 | name: "batch_conv2_23" 2400 | type: "BatchNorm" 2401 | bottom: "conv2_23" 2402 | top: "conv2_23" 2403 | param { lr_mult: 0 } 2404 | param { lr_mult: 0 } 2405 | param { lr_mult: 0 } 2406 | } 2407 | layer { 2408 | name: "relu_conv2_23" 2409 | type: "ReLU" 2410 | bottom: "conv2_23" 2411 | top: "conv2_23" 2412 | } 2413 | layer { 2414 | bottom: "conv2_21_plus" 2415 | top: "conv1_11" 2416 | name: "conv1_11" 2417 | param { 2418 | lr_mult: 1 2419 | decay_mult: 1 2420 | name: "conv1_11" 2421 | } 2422 | param { 2423 | lr_mult: 2 2424 | decay_mult: 0 2425 | 
name: "conv1_11_b" 2426 | } 2427 | type: "Convolution" 2428 | convolution_param { 2429 | num_output: 512 2430 | pad: 1 2431 | kernel_size: 3 2432 | weight_filler { 2433 | type: "xavier" 2434 | } 2435 | bias_filler { 2436 | type: "constant" 2437 | } 2438 | } 2439 | } 2440 | layer { 2441 | name: "dropout_conv1_11" 2442 | type: "Dropout" 2443 | bottom: "conv1_11" 2444 | top: "conv1_11" 2445 | dropout_param { 2446 | dropout_ratio: 0.3 2447 | } 2448 | } 2449 | layer { 2450 | name: "batch_conv1_11" 2451 | type: "BatchNorm" 2452 | bottom: "conv1_11" 2453 | top: "conv1_11" 2454 | param { lr_mult: 0 } 2455 | param { lr_mult: 0 } 2456 | param { lr_mult: 0 } 2457 | } 2458 | layer { 2459 | name: "relu_conv1_11" 2460 | type: "ReLU" 2461 | bottom: "conv1_11" 2462 | top: "conv1_11" 2463 | } 2464 | layer { 2465 | bottom: "extra_mid_join" 2466 | top: "conv0_5" 2467 | name: "conv0_5" 2468 | param { 2469 | lr_mult: 1 2470 | decay_mult: 1 2471 | name: "conv0_5" 2472 | } 2473 | param { 2474 | lr_mult: 2 2475 | decay_mult: 0 2476 | name: "conv0_5_b" 2477 | } 2478 | type: "Convolution" 2479 | convolution_param { 2480 | num_output: 512 2481 | pad: 1 2482 | kernel_size: 3 2483 | weight_filler { 2484 | type: "xavier" 2485 | } 2486 | bias_filler { 2487 | type: "constant" 2488 | } 2489 | } 2490 | } 2491 | layer { 2492 | name: "dropout_conv0_5" 2493 | type: "Dropout" 2494 | bottom: "conv0_5" 2495 | top: "conv0_5" 2496 | dropout_param { 2497 | dropout_ratio: 0.3 2498 | } 2499 | } 2500 | layer { 2501 | name: "batch_conv0_5" 2502 | type: "BatchNorm" 2503 | bottom: "conv0_5" 2504 | top: "conv0_5" 2505 | param { lr_mult: 0 } 2506 | param { lr_mult: 0 } 2507 | param { lr_mult: 0 } 2508 | } 2509 | layer { 2510 | name: "relu_conv0_5" 2511 | type: "ReLU" 2512 | bottom: "conv0_5" 2513 | top: "conv0_5" 2514 | } 2515 | layer { 2516 | bottom: "conv0_5" 2517 | top: "pool0_5_0" 2518 | name: "pool0_5_0" 2519 | type: "Pooling" 2520 | pooling_param { 2521 | pool: MAX 2522 | kernel_size: 2 2523 | stride: 2 2524 
| } 2525 | } 2526 | layer { 2527 | bottom: "pool0_5_0" 2528 | top: "pool0_5_1" 2529 | name: "pool0_5_1" 2530 | type: "Pooling" 2531 | pooling_param { 2532 | pool: MAX 2533 | kernel_size: 2 2534 | stride: 2 2535 | } 2536 | } 2537 | layer { 2538 | bottom: "conv1_11" 2539 | top: "pool1_11_0" 2540 | name: "pool1_11_0" 2541 | type: "Pooling" 2542 | pooling_param { 2543 | pool: MAX 2544 | kernel_size: 2 2545 | stride: 2 2546 | } 2547 | } 2548 | layer { 2549 | bottom: "pool1_11_0" 2550 | top: "pool1_11_1" 2551 | name: "pool1_11_1" 2552 | type: "Pooling" 2553 | pooling_param { 2554 | pool: MAX 2555 | kernel_size: 2 2556 | stride: 2 2557 | } 2558 | } 2559 | layer { 2560 | bottom: "conv2_23" 2561 | top: "pool2_23_0" 2562 | name: "pool2_23_0" 2563 | type: "Pooling" 2564 | pooling_param { 2565 | pool: MAX 2566 | kernel_size: 2 2567 | stride: 2 2568 | } 2569 | } 2570 | layer { 2571 | bottom: "pool2_23_0" 2572 | top: "pool2_23_1" 2573 | name: "pool2_23_1" 2574 | type: "Pooling" 2575 | pooling_param { 2576 | pool: MAX 2577 | kernel_size: 2 2578 | stride: 2 2579 | } 2580 | } 2581 | layer { 2582 | name: "join_pool2_23_plus" 2583 | type: "FractalJoin" 2584 | bottom: "pool0_5_1" 2585 | bottom: "pool1_11_1" 2586 | bottom: "pool2_23_1" 2587 | top: "pool2_23_plus" 2588 | fractal_join_param { 2589 | drop_path_ratio: 0.15 2590 | } 2591 | } 2592 | # Reduction: 6, spatial size: 0 2593 | layer { 2594 | bottom: "data" 2595 | top: "conv2_24" 2596 | name: "conv2_24" 2597 | param { 2598 | lr_mult: 1 2599 | decay_mult: 1 2600 | name: "conv2_24" 2601 | } 2602 | param { 2603 | lr_mult: 2 2604 | decay_mult: 0 2605 | name: "conv2_24_b" 2606 | } 2607 | type: "Convolution" 2608 | convolution_param { 2609 | num_output: 512 2610 | pad: 1 2611 | kernel_size: 3 2612 | weight_filler { 2613 | type: "xavier" 2614 | } 2615 | bias_filler { 2616 | type: "constant" 2617 | } 2618 | } 2619 | } 2620 | layer { 2621 | name: "dropout_conv2_24" 2622 | type: "Dropout" 2623 | bottom: "conv2_24" 2624 | top: "conv2_24" 2625 
| dropout_param { 2626 | dropout_ratio: 0.0 2627 | } 2628 | } 2629 | layer { 2630 | name: "batch_conv2_24" 2631 | type: "BatchNorm" 2632 | bottom: "conv2_24" 2633 | top: "conv2_24" 2634 | param { lr_mult: 0 } 2635 | param { lr_mult: 0 } 2636 | param { lr_mult: 0 } 2637 | } 2638 | layer { 2639 | name: "relu_conv2_24" 2640 | type: "ReLU" 2641 | bottom: "conv2_24" 2642 | top: "conv2_24" 2643 | } 2644 | layer { 2645 | bottom: "conv2_24" 2646 | top: "conv2_25" 2647 | name: "conv2_25" 2648 | param { 2649 | lr_mult: 1 2650 | decay_mult: 1 2651 | name: "conv2_25" 2652 | } 2653 | param { 2654 | lr_mult: 2 2655 | decay_mult: 0 2656 | name: "conv2_25_b" 2657 | } 2658 | type: "Convolution" 2659 | convolution_param { 2660 | num_output: 512 2661 | pad: 1 2662 | kernel_size: 3 2663 | weight_filler { 2664 | type: "xavier" 2665 | } 2666 | bias_filler { 2667 | type: "constant" 2668 | } 2669 | } 2670 | } 2671 | layer { 2672 | name: "dropout_conv2_25" 2673 | type: "Dropout" 2674 | bottom: "conv2_25" 2675 | top: "conv2_25" 2676 | dropout_param { 2677 | dropout_ratio: 0.0 2678 | } 2679 | } 2680 | layer { 2681 | name: "batch_conv2_25" 2682 | type: "BatchNorm" 2683 | bottom: "conv2_25" 2684 | top: "conv2_25" 2685 | param { lr_mult: 0 } 2686 | param { lr_mult: 0 } 2687 | param { lr_mult: 0 } 2688 | } 2689 | layer { 2690 | name: "relu_conv2_25" 2691 | type: "ReLU" 2692 | bottom: "conv2_25" 2693 | top: "conv2_25" 2694 | } 2695 | layer { 2696 | bottom: "data" 2697 | top: "conv1_12" 2698 | name: "conv1_12" 2699 | param { 2700 | lr_mult: 1 2701 | decay_mult: 1 2702 | name: "conv1_12" 2703 | } 2704 | param { 2705 | lr_mult: 2 2706 | decay_mult: 0 2707 | name: "conv1_12_b" 2708 | } 2709 | type: "Convolution" 2710 | convolution_param { 2711 | num_output: 512 2712 | pad: 1 2713 | kernel_size: 3 2714 | weight_filler { 2715 | type: "xavier" 2716 | } 2717 | bias_filler { 2718 | type: "constant" 2719 | } 2720 | } 2721 | } 2722 | layer { 2723 | name: "dropout_conv1_12" 2724 | type: "Dropout" 2725 | 
bottom: "conv1_12" 2726 | top: "conv1_12" 2727 | dropout_param { 2728 | dropout_ratio: 0.0 2729 | } 2730 | } 2731 | layer { 2732 | name: "batch_conv1_12" 2733 | type: "BatchNorm" 2734 | bottom: "conv1_12" 2735 | top: "conv1_12" 2736 | param { lr_mult: 0 } 2737 | param { lr_mult: 0 } 2738 | param { lr_mult: 0 } 2739 | } 2740 | layer { 2741 | name: "relu_conv1_12" 2742 | type: "ReLU" 2743 | bottom: "conv1_12" 2744 | top: "conv1_12" 2745 | } 2746 | layer { 2747 | name: "join_conv2_25_plus" 2748 | type: "FractalJoin" 2749 | bottom: "conv1_12" 2750 | bottom: "conv2_25" 2751 | top: "conv2_25_plus" 2752 | fractal_join_param { 2753 | drop_path_ratio: 0.15 2754 | } 2755 | } 2756 | layer { 2757 | bottom: "conv2_25_plus" 2758 | top: "conv2_26" 2759 | name: "conv2_26" 2760 | param { 2761 | lr_mult: 1 2762 | decay_mult: 1 2763 | name: "conv2_26" 2764 | } 2765 | param { 2766 | lr_mult: 2 2767 | decay_mult: 0 2768 | name: "conv2_26_b" 2769 | } 2770 | type: "Convolution" 2771 | convolution_param { 2772 | num_output: 512 2773 | pad: 1 2774 | kernel_size: 3 2775 | weight_filler { 2776 | type: "xavier" 2777 | } 2778 | bias_filler { 2779 | type: "constant" 2780 | } 2781 | } 2782 | } 2783 | layer { 2784 | name: "dropout_conv2_26" 2785 | type: "Dropout" 2786 | bottom: "conv2_26" 2787 | top: "conv2_26" 2788 | dropout_param { 2789 | dropout_ratio: 0.0 2790 | } 2791 | } 2792 | layer { 2793 | name: "batch_conv2_26" 2794 | type: "BatchNorm" 2795 | bottom: "conv2_26" 2796 | top: "conv2_26" 2797 | param { lr_mult: 0 } 2798 | param { lr_mult: 0 } 2799 | param { lr_mult: 0 } 2800 | } 2801 | layer { 2802 | name: "relu_conv2_26" 2803 | type: "ReLU" 2804 | bottom: "conv2_26" 2805 | top: "conv2_26" 2806 | } 2807 | layer { 2808 | bottom: "conv2_26" 2809 | top: "conv2_27" 2810 | name: "conv2_27" 2811 | param { 2812 | lr_mult: 1 2813 | decay_mult: 1 2814 | name: "conv2_27" 2815 | } 2816 | param { 2817 | lr_mult: 2 2818 | decay_mult: 0 2819 | name: "conv2_27_b" 2820 | } 2821 | type: "Convolution" 2822 | 
convolution_param { 2823 | num_output: 512 2824 | pad: 1 2825 | kernel_size: 3 2826 | weight_filler { 2827 | type: "xavier" 2828 | } 2829 | bias_filler { 2830 | type: "constant" 2831 | } 2832 | } 2833 | } 2834 | layer { 2835 | name: "dropout_conv2_27" 2836 | type: "Dropout" 2837 | bottom: "conv2_27" 2838 | top: "conv2_27" 2839 | dropout_param { 2840 | dropout_ratio: 0.0 2841 | } 2842 | } 2843 | layer { 2844 | name: "batch_conv2_27" 2845 | type: "BatchNorm" 2846 | bottom: "conv2_27" 2847 | top: "conv2_27" 2848 | param { lr_mult: 0 } 2849 | param { lr_mult: 0 } 2850 | param { lr_mult: 0 } 2851 | } 2852 | layer { 2853 | name: "relu_conv2_27" 2854 | type: "ReLU" 2855 | bottom: "conv2_27" 2856 | top: "conv2_27" 2857 | } 2858 | layer { 2859 | bottom: "conv2_25_plus" 2860 | top: "conv1_13" 2861 | name: "conv1_13" 2862 | param { 2863 | lr_mult: 1 2864 | decay_mult: 1 2865 | name: "conv1_13" 2866 | } 2867 | param { 2868 | lr_mult: 2 2869 | decay_mult: 0 2870 | name: "conv1_13_b" 2871 | } 2872 | type: "Convolution" 2873 | convolution_param { 2874 | num_output: 512 2875 | pad: 1 2876 | kernel_size: 3 2877 | weight_filler { 2878 | type: "xavier" 2879 | } 2880 | bias_filler { 2881 | type: "constant" 2882 | } 2883 | } 2884 | } 2885 | layer { 2886 | name: "dropout_conv1_13" 2887 | type: "Dropout" 2888 | bottom: "conv1_13" 2889 | top: "conv1_13" 2890 | dropout_param { 2891 | dropout_ratio: 0.0 2892 | } 2893 | } 2894 | layer { 2895 | name: "batch_conv1_13" 2896 | type: "BatchNorm" 2897 | bottom: "conv1_13" 2898 | top: "conv1_13" 2899 | param { lr_mult: 0 } 2900 | param { lr_mult: 0 } 2901 | param { lr_mult: 0 } 2902 | } 2903 | layer { 2904 | name: "relu_conv1_13" 2905 | type: "ReLU" 2906 | bottom: "conv1_13" 2907 | top: "conv1_13" 2908 | } 2909 | layer { 2910 | bottom: "data" 2911 | top: "conv0_6" 2912 | name: "conv0_6" 2913 | param { 2914 | lr_mult: 1 2915 | decay_mult: 1 2916 | name: "conv0_6" 2917 | } 2918 | param { 2919 | lr_mult: 2 2920 | decay_mult: 0 2921 | name: "conv0_6_b" 
2922 | } 2923 | type: "Convolution" 2924 | convolution_param { 2925 | num_output: 512 2926 | pad: 1 2927 | kernel_size: 3 2928 | weight_filler { 2929 | type: "xavier" 2930 | } 2931 | bias_filler { 2932 | type: "constant" 2933 | } 2934 | } 2935 | } 2936 | layer { 2937 | name: "dropout_conv0_6" 2938 | type: "Dropout" 2939 | bottom: "conv0_6" 2940 | top: "conv0_6" 2941 | dropout_param { 2942 | dropout_ratio: 0.0 2943 | } 2944 | } 2945 | layer { 2946 | name: "batch_conv0_6" 2947 | type: "BatchNorm" 2948 | bottom: "conv0_6" 2949 | top: "conv0_6" 2950 | param { lr_mult: 0 } 2951 | param { lr_mult: 0 } 2952 | param { lr_mult: 0 } 2953 | } 2954 | layer { 2955 | name: "relu_conv0_6" 2956 | type: "ReLU" 2957 | bottom: "conv0_6" 2958 | top: "conv0_6" 2959 | } 2960 | layer { 2961 | bottom: "conv0_6" 2962 | top: "pool0_6_0" 2963 | name: "pool0_6_0" 2964 | type: "Pooling" 2965 | pooling_param { 2966 | pool: MAX 2967 | kernel_size: 2 2968 | stride: 2 2969 | } 2970 | } 2971 | layer { 2972 | bottom: "pool0_6_0" 2973 | top: "pool0_6_1" 2974 | name: "pool0_6_1" 2975 | type: "Pooling" 2976 | pooling_param { 2977 | pool: MAX 2978 | kernel_size: 2 2979 | stride: 2 2980 | } 2981 | } 2982 | layer { 2983 | bottom: "pool0_6_1" 2984 | top: "pool0_6_2" 2985 | name: "pool0_6_2" 2986 | type: "Pooling" 2987 | pooling_param { 2988 | pool: MAX 2989 | kernel_size: 2 2990 | stride: 2 2991 | } 2992 | } 2993 | layer { 2994 | bottom: "pool0_6_2" 2995 | top: "pool0_6_3" 2996 | name: "pool0_6_3" 2997 | type: "Pooling" 2998 | pooling_param { 2999 | pool: MAX 3000 | kernel_size: 2 3001 | stride: 2 3002 | } 3003 | } 3004 | layer { 3005 | bottom: "conv1_13" 3006 | top: "pool1_13_0" 3007 | name: "pool1_13_0" 3008 | type: "Pooling" 3009 | pooling_param { 3010 | pool: MAX 3011 | kernel_size: 2 3012 | stride: 2 3013 | } 3014 | } 3015 | layer { 3016 | bottom: "pool1_13_0" 3017 | top: "pool1_13_1" 3018 | name: "pool1_13_1" 3019 | type: "Pooling" 3020 | pooling_param { 3021 | pool: MAX 3022 | kernel_size: 2 3023 | 
stride: 2 3024 | } 3025 | } 3026 | layer { 3027 | bottom: "pool1_13_1" 3028 | top: "pool1_13_2" 3029 | name: "pool1_13_2" 3030 | type: "Pooling" 3031 | pooling_param { 3032 | pool: MAX 3033 | kernel_size: 2 3034 | stride: 2 3035 | } 3036 | } 3037 | layer { 3038 | bottom: "pool1_13_2" 3039 | top: "pool1_13_3" 3040 | name: "pool1_13_3" 3041 | type: "Pooling" 3042 | pooling_param { 3043 | pool: MAX 3044 | kernel_size: 2 3045 | stride: 2 3046 | } 3047 | } 3048 | layer { 3049 | bottom: "conv2_27" 3050 | top: "pool2_27_0" 3051 | name: "pool2_27_0" 3052 | type: "Pooling" 3053 | pooling_param { 3054 | pool: MAX 3055 | kernel_size: 2 3056 | stride: 2 3057 | } 3058 | } 3059 | layer { 3060 | bottom: "pool2_27_0" 3061 | top: "pool2_27_1" 3062 | name: "pool2_27_1" 3063 | type: "Pooling" 3064 | pooling_param { 3065 | pool: MAX 3066 | kernel_size: 2 3067 | stride: 2 3068 | } 3069 | } 3070 | layer { 3071 | bottom: "pool2_27_1" 3072 | top: "pool2_27_2" 3073 | name: "pool2_27_2" 3074 | type: "Pooling" 3075 | pooling_param { 3076 | pool: MAX 3077 | kernel_size: 2 3078 | stride: 2 3079 | } 3080 | } 3081 | layer { 3082 | bottom: "pool2_27_2" 3083 | top: "pool2_27_3" 3084 | name: "pool2_27_3" 3085 | type: "Pooling" 3086 | pooling_param { 3087 | pool: MAX 3088 | kernel_size: 2 3089 | stride: 2 3090 | } 3091 | } 3092 | layer { 3093 | name: "join_pool2_27_plus" 3094 | type: "FractalJoin" 3095 | bottom: "pool0_6_3" 3096 | bottom: "pool1_13_3" 3097 | bottom: "pool2_27_3" 3098 | top: "pool2_27_plus" 3099 | fractal_join_param { 3100 | drop_path_ratio: 0.15 3101 | } 3102 | } 3103 | layer { 3104 | name: "join_extra_mid_join2" 3105 | type: "FractalJoin" 3106 | bottom: "pool2_19_plus" 3107 | bottom: "pool2_23_plus" 3108 | bottom: "pool2_27_plus" 3109 | top: "extra_mid_join2" 3110 | fractal_join_param { 3111 | drop_path_ratio: 0.15 3112 | } 3113 | } 3114 | # Reduction: 7, spatial size: 0 3115 | layer { 3116 | name: "prediction0" 3117 | type: "InnerProduct" 3118 | bottom: "extra_mid_join2" 3119 | 
top: "prediction0" 3120 | param { 3121 | lr_mult: 1 3122 | decay_mult: 1 3123 | name: "prediction0" 3124 | } 3125 | param { 3126 | lr_mult: 2 3127 | decay_mult: 0 3128 | name: "prediction0_b" 3129 | } 3130 | inner_product_param { 3131 | num_output: 10 3132 | weight_filler { 3133 | type: "xavier" 3134 | } 3135 | bias_filler { 3136 | type: "constant" 3137 | } 3138 | } 3139 | } 3140 | layer { 3141 | name: "loss0" 3142 | type: "SoftmaxWithLoss" 3143 | bottom: "prediction0" 3144 | bottom: "label" 3145 | top: "loss0" 3146 | loss_weight: 1.0 3147 | include: { phase: TRAIN } 3148 | } 3149 | 3150 | layer { 3151 | name: "accuracy_loss0" 3152 | type: "Accuracy" 3153 | bottom: "prediction0" 3154 | bottom: "label" 3155 | top: "accuracy_loss0" 3156 | include: { phase: TEST } 3157 | } 3158 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CNNDesignPatterns 2 | 3 | This repository hosts the Caffe code and prototxt files for the CNN Design Patterns paper available at https://arxiv.org/abs/1611.00847. If you use this work in your research, please cite this paper. 4 | 5 | 6 | In order to use the prototxt files, you should install Caffe along with the fractalnet code located at https://github.com/gustavla/fractalnet/tree/master/caffe. 7 | 8 | To use the SBN and TSN prototxt files, you will need to also install the freeze-drop-path code and the instructions are below. 9 | 10 | Prototxt files 11 | ============== 12 | 13 | solver.prototxt: The solver file. 14 | fractalnet.prototxt: The original fractalnet architecture 15 | fractalnet-AvgPool.prototxt: Replaces max pooling with average pooling in the original fractalnet architecture 16 | FoF.prototxt: The fractal of fractalnet architecture. 17 | FoF-TSN.prototxt: Taylor series modification where branch 2 is squared and branch 3 cubed prior to the final fractal join. 
18 | SBN-FDP.prototxt: Stagewise Boosting network architecture with freeze-drop-path as the final join. 19 | TSN-FDP.prototxt: Taylor series modification of the SBN architecture. 20 | 21 | 22 | Freeze-drop-path 23 | ================ 24 | 25 | Copy the hpp file into your include/caffe/layers folder and copy the cpp and cu files into your src/caffe/layers folder. 26 | 27 | Then, add the following to your ``src/caffe/proto/caffe.proto`` file in ``LayerParameter``: 28 | 29 | optional FreezeDropPathParameter freeze_drop_path_param = 148; 30 | 31 | Set ``148`` to whatever you want that is not in conflict with another layer's parameters. Also add the following to the bottom of ``caffe.proto``: 32 | 33 | message FreezeDropPathParameter { 34 | optional uint32 num_iter_per_cycle = 1 [default = 0]; 35 | optional uint32 interval_type = 2 [default = 0]; 36 | } 37 | 38 | Re-compile and you should now have access to the ``freezedroppath`` unit. 39 | 40 | Usage 41 | ----- 42 | Here is an example of how to use freeze-drop-path with two layers, with the stochastic option and intervals that increase with the square of the branch number, respectively:: 43 | 44 | layer { 45 | name: "freeze_drop_path" 46 | type: "FreezeDropPath" 47 | bottom: "pool2_27_plus" 48 | bottom: "extra_mid_join2a" 49 | top: "extra_mid_join2" 50 | freeze_drop_path_param { 51 | num_iter_per_cycle: 0 52 | interval_type: 0 53 | } 54 | } 55 | 56 | If you want to use freeze-drop-path deterministically, set num_iter_per_cycle to the number of iterations for a cycle through all the branches. 
57 | 58 | -------------------------------------------------------------------------------- /SBN-FDP.prototxt: -------------------------------------------------------------------------------- 1 | name: "FractalOfFractalNet" 2 | layer { 3 | name: "cifar" 4 | type: "Data" 5 | top: "data" 6 | top: "label" 7 | include { 8 | phase: TRAIN 9 | } 10 | transform_param { 11 | mirror: true 12 | mean_file: "examples/cifar10/mean.binaryproto" 13 | } 14 | data_param { 15 | source: "examples/cifar10/cifar10_train_lmdb" 16 | batch_size: 25 17 | backend: LMDB 18 | } 19 | } 20 | layer { 21 | name: "cifar" 22 | type: "Data" 23 | top: "data" 24 | top: "label" 25 | include { 26 | phase: TEST 27 | } 28 | transform_param { 29 | mean_file: "examples/cifar10/mean.binaryproto" 30 | mirror: false 31 | # shuffle: true 32 | } 33 | data_param { 34 | source: "examples/cifar10/cifar10_test_lmdb" 35 | batch_size: 10 36 | backend: LMDB 37 | } 38 | } 39 | # Input size: 32 40 | layer { 41 | bottom: "data" 42 | top: "conv2_0" 43 | name: "conv2_0" 44 | param { 45 | lr_mult: 1 46 | decay_mult: 1 47 | name: "conv2_0" 48 | } 49 | param { 50 | lr_mult: 2 51 | decay_mult: 0 52 | name: "conv2_0_b" 53 | } 54 | type: "Convolution" 55 | convolution_param { 56 | num_output: 64 57 | pad: 1 58 | kernel_size: 3 59 | weight_filler { 60 | type: "xavier" 61 | } 62 | bias_filler { 63 | type: "constant" 64 | } 65 | } 66 | } 67 | layer { 68 | name: "dropout_conv2_0" 69 | type: "Dropout" 70 | bottom: "conv2_0" 71 | top: "conv2_0" 72 | dropout_param { 73 | dropout_ratio: 0.0 74 | } 75 | } 76 | layer { 77 | name: "batch_conv2_0" 78 | type: "BatchNorm" 79 | bottom: "conv2_0" 80 | top: "conv2_0" 81 | param { lr_mult: 0 } 82 | param { lr_mult: 0 } 83 | param { lr_mult: 0 } 84 | } 85 | layer { 86 | name: "relu_conv2_0" 87 | type: "ReLU" 88 | bottom: "conv2_0" 89 | top: "conv2_0" 90 | } 91 | layer { 92 | bottom: "conv2_0" 93 | top: "conv2_1" 94 | name: "conv2_1" 95 | param { 96 | lr_mult: 1 97 | decay_mult: 1 98 | name: "conv2_1" 
99 | } 100 | param { 101 | lr_mult: 2 102 | decay_mult: 0 103 | name: "conv2_1_b" 104 | } 105 | type: "Convolution" 106 | convolution_param { 107 | num_output: 64 108 | pad: 1 109 | kernel_size: 3 110 | weight_filler { 111 | type: "xavier" 112 | } 113 | bias_filler { 114 | type: "constant" 115 | } 116 | } 117 | } 118 | layer { 119 | name: "dropout_conv2_1" 120 | type: "Dropout" 121 | bottom: "conv2_1" 122 | top: "conv2_1" 123 | dropout_param { 124 | dropout_ratio: 0.0 125 | } 126 | } 127 | layer { 128 | name: "batch_conv2_1" 129 | type: "BatchNorm" 130 | bottom: "conv2_1" 131 | top: "conv2_1" 132 | param { lr_mult: 0 } 133 | param { lr_mult: 0 } 134 | param { lr_mult: 0 } 135 | } 136 | layer { 137 | name: "relu_conv2_1" 138 | type: "ReLU" 139 | bottom: "conv2_1" 140 | top: "conv2_1" 141 | } 142 | layer { 143 | bottom: "data" 144 | top: "conv1_0" 145 | name: "conv1_0" 146 | param { 147 | lr_mult: 1 148 | decay_mult: 1 149 | name: "conv1_0" 150 | } 151 | param { 152 | lr_mult: 2 153 | decay_mult: 0 154 | name: "conv1_0_b" 155 | } 156 | type: "Convolution" 157 | convolution_param { 158 | num_output: 64 159 | pad: 1 160 | kernel_size: 3 161 | weight_filler { 162 | type: "xavier" 163 | } 164 | bias_filler { 165 | type: "constant" 166 | } 167 | } 168 | } 169 | layer { 170 | name: "dropout_conv1_0" 171 | type: "Dropout" 172 | bottom: "conv1_0" 173 | top: "conv1_0" 174 | dropout_param { 175 | dropout_ratio: 0.0 176 | } 177 | } 178 | layer { 179 | name: "batch_conv1_0" 180 | type: "BatchNorm" 181 | bottom: "conv1_0" 182 | top: "conv1_0" 183 | param { lr_mult: 0 } 184 | param { lr_mult: 0 } 185 | param { lr_mult: 0 } 186 | } 187 | layer { 188 | name: "relu_conv1_0" 189 | type: "ReLU" 190 | bottom: "conv1_0" 191 | top: "conv1_0" 192 | } 193 | layer { 194 | name: "join_conv2_1_plus" 195 | type: "FractalJoin" 196 | bottom: "conv1_0" 197 | bottom: "conv2_1" 198 | top: "conv2_1_plus" 199 | fractal_join_param { 200 | drop_path_ratio: 0.15 201 | } 202 | } 203 | layer { 204 | 
bottom: "conv2_1_plus" 205 | top: "conv2_2" 206 | name: "conv2_2" 207 | param { 208 | lr_mult: 1 209 | decay_mult: 1 210 | name: "conv2_2" 211 | } 212 | param { 213 | lr_mult: 2 214 | decay_mult: 0 215 | name: "conv2_2_b" 216 | } 217 | type: "Convolution" 218 | convolution_param { 219 | num_output: 64 220 | pad: 1 221 | kernel_size: 3 222 | weight_filler { 223 | type: "xavier" 224 | } 225 | bias_filler { 226 | type: "constant" 227 | } 228 | } 229 | } 230 | layer { 231 | name: "dropout_conv2_2" 232 | type: "Dropout" 233 | bottom: "conv2_2" 234 | top: "conv2_2" 235 | dropout_param { 236 | dropout_ratio: 0.0 237 | } 238 | } 239 | layer { 240 | name: "batch_conv2_2" 241 | type: "BatchNorm" 242 | bottom: "conv2_2" 243 | top: "conv2_2" 244 | param { lr_mult: 0 } 245 | param { lr_mult: 0 } 246 | param { lr_mult: 0 } 247 | } 248 | layer { 249 | name: "relu_conv2_2" 250 | type: "ReLU" 251 | bottom: "conv2_2" 252 | top: "conv2_2" 253 | } 254 | layer { 255 | bottom: "conv2_2" 256 | top: "conv2_3" 257 | name: "conv2_3" 258 | param { 259 | lr_mult: 1 260 | decay_mult: 1 261 | name: "conv2_3" 262 | } 263 | param { 264 | lr_mult: 2 265 | decay_mult: 0 266 | name: "conv2_3_b" 267 | } 268 | type: "Convolution" 269 | convolution_param { 270 | num_output: 64 271 | pad: 1 272 | kernel_size: 3 273 | weight_filler { 274 | type: "xavier" 275 | } 276 | bias_filler { 277 | type: "constant" 278 | } 279 | } 280 | } 281 | layer { 282 | name: "dropout_conv2_3" 283 | type: "Dropout" 284 | bottom: "conv2_3" 285 | top: "conv2_3" 286 | dropout_param { 287 | dropout_ratio: 0.0 288 | } 289 | } 290 | layer { 291 | name: "batch_conv2_3" 292 | type: "BatchNorm" 293 | bottom: "conv2_3" 294 | top: "conv2_3" 295 | param { lr_mult: 0 } 296 | param { lr_mult: 0 } 297 | param { lr_mult: 0 } 298 | } 299 | layer { 300 | name: "relu_conv2_3" 301 | type: "ReLU" 302 | bottom: "conv2_3" 303 | top: "conv2_3" 304 | } 305 | layer { 306 | bottom: "conv2_1_plus" 307 | top: "conv1_1" 308 | name: "conv1_1" 309 | param { 
310 | lr_mult: 1 311 | decay_mult: 1 312 | name: "conv1_1" 313 | } 314 | param { 315 | lr_mult: 2 316 | decay_mult: 0 317 | name: "conv1_1_b" 318 | } 319 | type: "Convolution" 320 | convolution_param { 321 | num_output: 64 322 | pad: 1 323 | kernel_size: 3 324 | weight_filler { 325 | type: "xavier" 326 | } 327 | bias_filler { 328 | type: "constant" 329 | } 330 | } 331 | } 332 | layer { 333 | name: "dropout_conv1_1" 334 | type: "Dropout" 335 | bottom: "conv1_1" 336 | top: "conv1_1" 337 | dropout_param { 338 | dropout_ratio: 0.0 339 | } 340 | } 341 | layer { 342 | name: "batch_conv1_1" 343 | type: "BatchNorm" 344 | bottom: "conv1_1" 345 | top: "conv1_1" 346 | param { lr_mult: 0 } 347 | param { lr_mult: 0 } 348 | param { lr_mult: 0 } 349 | } 350 | layer { 351 | name: "relu_conv1_1" 352 | type: "ReLU" 353 | bottom: "conv1_1" 354 | top: "conv1_1" 355 | } 356 | layer { 357 | bottom: "data" 358 | top: "conv0_0" 359 | name: "conv0_0" 360 | param { 361 | lr_mult: 1 362 | decay_mult: 1 363 | name: "conv0_0" 364 | } 365 | param { 366 | lr_mult: 2 367 | decay_mult: 0 368 | name: "conv0_0_b" 369 | } 370 | type: "Convolution" 371 | convolution_param { 372 | num_output: 64 373 | pad: 1 374 | kernel_size: 3 375 | weight_filler { 376 | type: "xavier" 377 | } 378 | bias_filler { 379 | type: "constant" 380 | } 381 | } 382 | } 383 | layer { 384 | name: "dropout_conv0_0" 385 | type: "Dropout" 386 | bottom: "conv0_0" 387 | top: "conv0_0" 388 | dropout_param { 389 | dropout_ratio: 0.0 390 | } 391 | } 392 | layer { 393 | name: "batch_conv0_0" 394 | type: "BatchNorm" 395 | bottom: "conv0_0" 396 | top: "conv0_0" 397 | param { lr_mult: 0 } 398 | param { lr_mult: 0 } 399 | param { lr_mult: 0 } 400 | } 401 | layer { 402 | name: "relu_conv0_0" 403 | type: "ReLU" 404 | bottom: "conv0_0" 405 | top: "conv0_0" 406 | } 407 | layer { 408 | bottom: "conv0_0" 409 | top: "pool0_0" 410 | name: "pool0_0" 411 | type: "Pooling" 412 | pooling_param { 413 | pool: MAX 414 | kernel_size: 2 415 | stride: 2 416 | 
} 417 | } 418 | layer { 419 | bottom: "conv1_1" 420 | top: "pool1_1" 421 | name: "pool1_1" 422 | type: "Pooling" 423 | pooling_param { 424 | pool: MAX 425 | kernel_size: 2 426 | stride: 2 427 | } 428 | } 429 | layer { 430 | bottom: "conv2_3" 431 | top: "pool2_3" 432 | name: "pool2_3" 433 | type: "Pooling" 434 | pooling_param { 435 | pool: MAX 436 | kernel_size: 2 437 | stride: 2 438 | } 439 | } 440 | layer { 441 | name: "join_pool2_3_plus" 442 | type: "FractalJoin" 443 | bottom: "pool0_0" 444 | bottom: "pool1_1" 445 | bottom: "pool2_3" 446 | top: "pool2_3_plus" 447 | fractal_join_param { 448 | drop_path_ratio: 0.15 449 | } 450 | } 451 | # Reduction: 1, spatial size: 16 452 | layer { 453 | bottom: "pool2_3_plus" 454 | top: "conv2_4" 455 | name: "conv2_4" 456 | param { 457 | lr_mult: 1 458 | decay_mult: 1 459 | name: "conv2_4" 460 | } 461 | param { 462 | lr_mult: 2 463 | decay_mult: 0 464 | name: "conv2_4_b" 465 | } 466 | type: "Convolution" 467 | convolution_param { 468 | num_output: 128 469 | pad: 1 470 | kernel_size: 3 471 | weight_filler { 472 | type: "xavier" 473 | } 474 | bias_filler { 475 | type: "constant" 476 | } 477 | } 478 | } 479 | layer { 480 | name: "dropout_conv2_4" 481 | type: "Dropout" 482 | bottom: "conv2_4" 483 | top: "conv2_4" 484 | dropout_param { 485 | dropout_ratio: 0.1 486 | } 487 | } 488 | layer { 489 | name: "batch_conv2_4" 490 | type: "BatchNorm" 491 | bottom: "conv2_4" 492 | top: "conv2_4" 493 | param { lr_mult: 0 } 494 | param { lr_mult: 0 } 495 | param { lr_mult: 0 } 496 | } 497 | layer { 498 | name: "relu_conv2_4" 499 | type: "ReLU" 500 | bottom: "conv2_4" 501 | top: "conv2_4" 502 | } 503 | layer { 504 | bottom: "conv2_4" 505 | top: "conv2_5" 506 | name: "conv2_5" 507 | param { 508 | lr_mult: 1 509 | decay_mult: 1 510 | name: "conv2_5" 511 | } 512 | param { 513 | lr_mult: 2 514 | decay_mult: 0 515 | name: "conv2_5_b" 516 | } 517 | type: "Convolution" 518 | convolution_param { 519 | num_output: 128 520 | pad: 1 521 | kernel_size: 3 522 | 
weight_filler { 523 | type: "xavier" 524 | } 525 | bias_filler { 526 | type: "constant" 527 | } 528 | } 529 | } 530 | layer { 531 | name: "dropout_conv2_5" 532 | type: "Dropout" 533 | bottom: "conv2_5" 534 | top: "conv2_5" 535 | dropout_param { 536 | dropout_ratio: 0.1 537 | } 538 | } 539 | layer { 540 | name: "batch_conv2_5" 541 | type: "BatchNorm" 542 | bottom: "conv2_5" 543 | top: "conv2_5" 544 | param { lr_mult: 0 } 545 | param { lr_mult: 0 } 546 | param { lr_mult: 0 } 547 | } 548 | layer { 549 | name: "relu_conv2_5" 550 | type: "ReLU" 551 | bottom: "conv2_5" 552 | top: "conv2_5" 553 | } 554 | layer { 555 | bottom: "pool2_3_plus" 556 | top: "conv1_2" 557 | name: "conv1_2" 558 | param { 559 | lr_mult: 1 560 | decay_mult: 1 561 | name: "conv1_2" 562 | } 563 | param { 564 | lr_mult: 2 565 | decay_mult: 0 566 | name: "conv1_2_b" 567 | } 568 | type: "Convolution" 569 | convolution_param { 570 | num_output: 128 571 | pad: 1 572 | kernel_size: 3 573 | weight_filler { 574 | type: "xavier" 575 | } 576 | bias_filler { 577 | type: "constant" 578 | } 579 | } 580 | } 581 | layer { 582 | name: "dropout_conv1_2" 583 | type: "Dropout" 584 | bottom: "conv1_2" 585 | top: "conv1_2" 586 | dropout_param { 587 | dropout_ratio: 0.1 588 | } 589 | } 590 | layer { 591 | name: "batch_conv1_2" 592 | type: "BatchNorm" 593 | bottom: "conv1_2" 594 | top: "conv1_2" 595 | param { lr_mult: 0 } 596 | param { lr_mult: 0 } 597 | param { lr_mult: 0 } 598 | } 599 | layer { 600 | name: "relu_conv1_2" 601 | type: "ReLU" 602 | bottom: "conv1_2" 603 | top: "conv1_2" 604 | } 605 | layer { 606 | name: "join_conv2_5_plus" 607 | type: "FractalJoin" 608 | bottom: "conv1_2" 609 | bottom: "conv2_5" 610 | top: "conv2_5_plus" 611 | fractal_join_param { 612 | drop_path_ratio: 0.15 613 | } 614 | } 615 | layer { 616 | bottom: "conv2_5_plus" 617 | top: "conv2_6" 618 | name: "conv2_6" 619 | param { 620 | lr_mult: 1 621 | decay_mult: 1 622 | name: "conv2_6" 623 | } 624 | param { 625 | lr_mult: 2 626 | decay_mult: 0 
627 | name: "conv2_6_b" 628 | } 629 | type: "Convolution" 630 | convolution_param { 631 | num_output: 128 632 | pad: 1 633 | kernel_size: 3 634 | weight_filler { 635 | type: "xavier" 636 | } 637 | bias_filler { 638 | type: "constant" 639 | } 640 | } 641 | } 642 | layer { 643 | name: "dropout_conv2_6" 644 | type: "Dropout" 645 | bottom: "conv2_6" 646 | top: "conv2_6" 647 | dropout_param { 648 | dropout_ratio: 0.1 649 | } 650 | } 651 | layer { 652 | name: "batch_conv2_6" 653 | type: "BatchNorm" 654 | bottom: "conv2_6" 655 | top: "conv2_6" 656 | param { lr_mult: 0 } 657 | param { lr_mult: 0 } 658 | param { lr_mult: 0 } 659 | } 660 | layer { 661 | name: "relu_conv2_6" 662 | type: "ReLU" 663 | bottom: "conv2_6" 664 | top: "conv2_6" 665 | } 666 | layer { 667 | bottom: "conv2_6" 668 | top: "conv2_7" 669 | name: "conv2_7" 670 | param { 671 | lr_mult: 1 672 | decay_mult: 1 673 | name: "conv2_7" 674 | } 675 | param { 676 | lr_mult: 2 677 | decay_mult: 0 678 | name: "conv2_7_b" 679 | } 680 | type: "Convolution" 681 | convolution_param { 682 | num_output: 128 683 | pad: 1 684 | kernel_size: 3 685 | weight_filler { 686 | type: "xavier" 687 | } 688 | bias_filler { 689 | type: "constant" 690 | } 691 | } 692 | } 693 | layer { 694 | name: "dropout_conv2_7" 695 | type: "Dropout" 696 | bottom: "conv2_7" 697 | top: "conv2_7" 698 | dropout_param { 699 | dropout_ratio: 0.1 700 | } 701 | } 702 | layer { 703 | name: "batch_conv2_7" 704 | type: "BatchNorm" 705 | bottom: "conv2_7" 706 | top: "conv2_7" 707 | param { lr_mult: 0 } 708 | param { lr_mult: 0 } 709 | param { lr_mult: 0 } 710 | } 711 | layer { 712 | name: "relu_conv2_7" 713 | type: "ReLU" 714 | bottom: "conv2_7" 715 | top: "conv2_7" 716 | } 717 | layer { 718 | bottom: "conv2_5_plus" 719 | top: "conv1_3" 720 | name: "conv1_3" 721 | param { 722 | lr_mult: 1 723 | decay_mult: 1 724 | name: "conv1_3" 725 | } 726 | param { 727 | lr_mult: 2 728 | decay_mult: 0 729 | name: "conv1_3_b" 730 | } 731 | type: "Convolution" 732 | 
convolution_param { 733 | num_output: 128 734 | pad: 1 735 | kernel_size: 3 736 | weight_filler { 737 | type: "xavier" 738 | } 739 | bias_filler { 740 | type: "constant" 741 | } 742 | } 743 | } 744 | layer { 745 | name: "dropout_conv1_3" 746 | type: "Dropout" 747 | bottom: "conv1_3" 748 | top: "conv1_3" 749 | dropout_param { 750 | dropout_ratio: 0.1 751 | } 752 | } 753 | layer { 754 | name: "batch_conv1_3" 755 | type: "BatchNorm" 756 | bottom: "conv1_3" 757 | top: "conv1_3" 758 | param { lr_mult: 0 } 759 | param { lr_mult: 0 } 760 | param { lr_mult: 0 } 761 | } 762 | layer { 763 | name: "relu_conv1_3" 764 | type: "ReLU" 765 | bottom: "conv1_3" 766 | top: "conv1_3" 767 | } 768 | layer { 769 | bottom: "pool2_3_plus" 770 | top: "conv0_1" 771 | name: "conv0_1" 772 | param { 773 | lr_mult: 1 774 | decay_mult: 1 775 | name: "conv0_1" 776 | } 777 | param { 778 | lr_mult: 2 779 | decay_mult: 0 780 | name: "conv0_1_b" 781 | } 782 | type: "Convolution" 783 | convolution_param { 784 | num_output: 128 785 | pad: 1 786 | kernel_size: 3 787 | weight_filler { 788 | type: "xavier" 789 | } 790 | bias_filler { 791 | type: "constant" 792 | } 793 | } 794 | } 795 | layer { 796 | name: "dropout_conv0_1" 797 | type: "Dropout" 798 | bottom: "conv0_1" 799 | top: "conv0_1" 800 | dropout_param { 801 | dropout_ratio: 0.1 802 | } 803 | } 804 | layer { 805 | name: "batch_conv0_1" 806 | type: "BatchNorm" 807 | bottom: "conv0_1" 808 | top: "conv0_1" 809 | param { lr_mult: 0 } 810 | param { lr_mult: 0 } 811 | param { lr_mult: 0 } 812 | } 813 | layer { 814 | name: "relu_conv0_1" 815 | type: "ReLU" 816 | bottom: "conv0_1" 817 | top: "conv0_1" 818 | } 819 | layer { 820 | bottom: "conv0_1" 821 | top: "pool0_1" 822 | name: "pool0_1" 823 | type: "Pooling" 824 | pooling_param { 825 | pool: MAX 826 | kernel_size: 2 827 | stride: 2 828 | } 829 | } 830 | layer { 831 | bottom: "conv1_3" 832 | top: "pool1_3" 833 | name: "pool1_3" 834 | type: "Pooling" 835 | pooling_param { 836 | pool: MAX 837 | kernel_size: 2 
838 | stride: 2 839 | } 840 | } 841 | layer { 842 | bottom: "conv2_7" 843 | top: "pool2_7" 844 | name: "pool2_7" 845 | type: "Pooling" 846 | pooling_param { 847 | pool: MAX 848 | kernel_size: 2 849 | stride: 2 850 | } 851 | } 852 | layer { 853 | name: "join_pool2_7_plus" 854 | type: "FractalJoin" 855 | bottom: "pool0_1" 856 | bottom: "pool1_3" 857 | bottom: "pool2_7" 858 | top: "pool2_7_plus" 859 | fractal_join_param { 860 | drop_path_ratio: 0.15 861 | } 862 | } 863 | # Reduction: 2, spatial size: 8 864 | layer { 865 | bottom: "data" 866 | top: "conv2_8" 867 | name: "conv2_8" 868 | param { 869 | lr_mult: 1 870 | decay_mult: 1 871 | name: "conv2_8" 872 | } 873 | param { 874 | lr_mult: 2 875 | decay_mult: 0 876 | name: "conv2_8_b" 877 | } 878 | type: "Convolution" 879 | convolution_param { 880 | num_output: 128 881 | pad: 1 882 | kernel_size: 3 883 | weight_filler { 884 | type: "xavier" 885 | } 886 | bias_filler { 887 | type: "constant" 888 | } 889 | } 890 | } 891 | layer { 892 | name: "dropout_conv2_8" 893 | type: "Dropout" 894 | bottom: "conv2_8" 895 | top: "conv2_8" 896 | dropout_param { 897 | dropout_ratio: 0.0 898 | } 899 | } 900 | layer { 901 | name: "batch_conv2_8" 902 | type: "BatchNorm" 903 | bottom: "conv2_8" 904 | top: "conv2_8" 905 | param { lr_mult: 0 } 906 | param { lr_mult: 0 } 907 | param { lr_mult: 0 } 908 | } 909 | layer { 910 | name: "relu_conv2_8" 911 | type: "ReLU" 912 | bottom: "conv2_8" 913 | top: "conv2_8" 914 | } 915 | layer { 916 | bottom: "conv2_8" 917 | top: "conv2_9" 918 | name: "conv2_9" 919 | param { 920 | lr_mult: 1 921 | decay_mult: 1 922 | name: "conv2_9" 923 | } 924 | param { 925 | lr_mult: 2 926 | decay_mult: 0 927 | name: "conv2_9_b" 928 | } 929 | type: "Convolution" 930 | convolution_param { 931 | num_output: 128 932 | pad: 1 933 | kernel_size: 3 934 | weight_filler { 935 | type: "xavier" 936 | } 937 | bias_filler { 938 | type: "constant" 939 | } 940 | } 941 | } 942 | layer { 943 | name: "dropout_conv2_9" 944 | type: "Dropout" 
945 | bottom: "conv2_9" 946 | top: "conv2_9" 947 | dropout_param { 948 | dropout_ratio: 0.0 949 | } 950 | } 951 | layer { 952 | name: "batch_conv2_9" 953 | type: "BatchNorm" 954 | bottom: "conv2_9" 955 | top: "conv2_9" 956 | param { lr_mult: 0 } 957 | param { lr_mult: 0 } 958 | param { lr_mult: 0 } 959 | } 960 | layer { 961 | name: "relu_conv2_9" 962 | type: "ReLU" 963 | bottom: "conv2_9" 964 | top: "conv2_9" 965 | } 966 | layer { 967 | bottom: "data" 968 | top: "conv1_4" 969 | name: "conv1_4" 970 | param { 971 | lr_mult: 1 972 | decay_mult: 1 973 | name: "conv1_4" 974 | } 975 | param { 976 | lr_mult: 2 977 | decay_mult: 0 978 | name: "conv1_4_b" 979 | } 980 | type: "Convolution" 981 | convolution_param { 982 | num_output: 128 983 | pad: 1 984 | kernel_size: 3 985 | weight_filler { 986 | type: "xavier" 987 | } 988 | bias_filler { 989 | type: "constant" 990 | } 991 | } 992 | } 993 | layer { 994 | name: "dropout_conv1_4" 995 | type: "Dropout" 996 | bottom: "conv1_4" 997 | top: "conv1_4" 998 | dropout_param { 999 | dropout_ratio: 0.0 1000 | } 1001 | } 1002 | layer { 1003 | name: "batch_conv1_4" 1004 | type: "BatchNorm" 1005 | bottom: "conv1_4" 1006 | top: "conv1_4" 1007 | param { lr_mult: 0 } 1008 | param { lr_mult: 0 } 1009 | param { lr_mult: 0 } 1010 | } 1011 | layer { 1012 | name: "relu_conv1_4" 1013 | type: "ReLU" 1014 | bottom: "conv1_4" 1015 | top: "conv1_4" 1016 | } 1017 | layer { 1018 | name: "join_conv2_9_plus" 1019 | type: "FractalJoin" 1020 | bottom: "conv1_4" 1021 | bottom: "conv2_9" 1022 | top: "conv2_9_plus" 1023 | fractal_join_param { 1024 | drop_path_ratio: 0.15 1025 | } 1026 | } 1027 | layer { 1028 | bottom: "conv2_9_plus" 1029 | top: "conv2_10" 1030 | name: "conv2_10" 1031 | param { 1032 | lr_mult: 1 1033 | decay_mult: 1 1034 | name: "conv2_10" 1035 | } 1036 | param { 1037 | lr_mult: 2 1038 | decay_mult: 0 1039 | name: "conv2_10_b" 1040 | } 1041 | type: "Convolution" 1042 | convolution_param { 1043 | num_output: 128 1044 | pad: 1 1045 | kernel_size: 
3 1046 | weight_filler { 1047 | type: "xavier" 1048 | } 1049 | bias_filler { 1050 | type: "constant" 1051 | } 1052 | } 1053 | } 1054 | layer { 1055 | name: "dropout_conv2_10" 1056 | type: "Dropout" 1057 | bottom: "conv2_10" 1058 | top: "conv2_10" 1059 | dropout_param { 1060 | dropout_ratio: 0.0 1061 | } 1062 | } 1063 | layer { 1064 | name: "batch_conv2_10" 1065 | type: "BatchNorm" 1066 | bottom: "conv2_10" 1067 | top: "conv2_10" 1068 | param { lr_mult: 0 } 1069 | param { lr_mult: 0 } 1070 | param { lr_mult: 0 } 1071 | } 1072 | layer { 1073 | name: "relu_conv2_10" 1074 | type: "ReLU" 1075 | bottom: "conv2_10" 1076 | top: "conv2_10" 1077 | } 1078 | layer { 1079 | bottom: "conv2_10" 1080 | top: "conv2_11" 1081 | name: "conv2_11" 1082 | param { 1083 | lr_mult: 1 1084 | decay_mult: 1 1085 | name: "conv2_11" 1086 | } 1087 | param { 1088 | lr_mult: 2 1089 | decay_mult: 0 1090 | name: "conv2_11_b" 1091 | } 1092 | type: "Convolution" 1093 | convolution_param { 1094 | num_output: 128 1095 | pad: 1 1096 | kernel_size: 3 1097 | weight_filler { 1098 | type: "xavier" 1099 | } 1100 | bias_filler { 1101 | type: "constant" 1102 | } 1103 | } 1104 | } 1105 | layer { 1106 | name: "dropout_conv2_11" 1107 | type: "Dropout" 1108 | bottom: "conv2_11" 1109 | top: "conv2_11" 1110 | dropout_param { 1111 | dropout_ratio: 0.0 1112 | } 1113 | } 1114 | layer { 1115 | name: "batch_conv2_11" 1116 | type: "BatchNorm" 1117 | bottom: "conv2_11" 1118 | top: "conv2_11" 1119 | param { lr_mult: 0 } 1120 | param { lr_mult: 0 } 1121 | param { lr_mult: 0 } 1122 | } 1123 | layer { 1124 | name: "relu_conv2_11" 1125 | type: "ReLU" 1126 | bottom: "conv2_11" 1127 | top: "conv2_11" 1128 | } 1129 | layer { 1130 | bottom: "conv2_9_plus" 1131 | top: "conv1_5" 1132 | name: "conv1_5" 1133 | param { 1134 | lr_mult: 1 1135 | decay_mult: 1 1136 | name: "conv1_5" 1137 | } 1138 | param { 1139 | lr_mult: 2 1140 | decay_mult: 0 1141 | name: "conv1_5_b" 1142 | } 1143 | type: "Convolution" 1144 | convolution_param { 1145 | 
num_output: 128 1146 | pad: 1 1147 | kernel_size: 3 1148 | weight_filler { 1149 | type: "xavier" 1150 | } 1151 | bias_filler { 1152 | type: "constant" 1153 | } 1154 | } 1155 | } 1156 | layer { 1157 | name: "dropout_conv1_5" 1158 | type: "Dropout" 1159 | bottom: "conv1_5" 1160 | top: "conv1_5" 1161 | dropout_param { 1162 | dropout_ratio: 0.0 1163 | } 1164 | } 1165 | layer { 1166 | name: "batch_conv1_5" 1167 | type: "BatchNorm" 1168 | bottom: "conv1_5" 1169 | top: "conv1_5" 1170 | param { lr_mult: 0 } 1171 | param { lr_mult: 0 } 1172 | param { lr_mult: 0 } 1173 | } 1174 | layer { 1175 | name: "relu_conv1_5" 1176 | type: "ReLU" 1177 | bottom: "conv1_5" 1178 | top: "conv1_5" 1179 | } 1180 | layer { 1181 | bottom: "data" 1182 | top: "conv0_2" 1183 | name: "conv0_2" 1184 | param { 1185 | lr_mult: 1 1186 | decay_mult: 1 1187 | name: "conv0_2" 1188 | } 1189 | param { 1190 | lr_mult: 2 1191 | decay_mult: 0 1192 | name: "conv0_2_b" 1193 | } 1194 | type: "Convolution" 1195 | convolution_param { 1196 | num_output: 128 1197 | pad: 1 1198 | kernel_size: 3 1199 | weight_filler { 1200 | type: "xavier" 1201 | } 1202 | bias_filler { 1203 | type: "constant" 1204 | } 1205 | } 1206 | } 1207 | layer { 1208 | name: "dropout_conv0_2" 1209 | type: "Dropout" 1210 | bottom: "conv0_2" 1211 | top: "conv0_2" 1212 | dropout_param { 1213 | dropout_ratio: 0.0 1214 | } 1215 | } 1216 | layer { 1217 | name: "batch_conv0_2" 1218 | type: "BatchNorm" 1219 | bottom: "conv0_2" 1220 | top: "conv0_2" 1221 | param { lr_mult: 0 } 1222 | param { lr_mult: 0 } 1223 | param { lr_mult: 0 } 1224 | } 1225 | layer { 1226 | name: "relu_conv0_2" 1227 | type: "ReLU" 1228 | bottom: "conv0_2" 1229 | top: "conv0_2" 1230 | } 1231 | layer { 1232 | bottom: "conv0_2" 1233 | top: "pool0_2_0" 1234 | name: "pool0_2_0" 1235 | type: "Pooling" 1236 | pooling_param { 1237 | pool: MAX 1238 | kernel_size: 2 1239 | stride: 2 1240 | } 1241 | } 1242 | layer { 1243 | bottom: "pool0_2_0" 1244 | top: "pool0_2_1" 1245 | name: "pool0_2_1" 1246 
| type: "Pooling" 1247 | pooling_param { 1248 | pool: MAX 1249 | kernel_size: 2 1250 | stride: 2 1251 | } 1252 | } 1253 | layer { 1254 | bottom: "conv1_5" 1255 | top: "pool1_5_0" 1256 | name: "pool1_5_0" 1257 | type: "Pooling" 1258 | pooling_param { 1259 | pool: MAX 1260 | kernel_size: 2 1261 | stride: 2 1262 | } 1263 | } 1264 | layer { 1265 | bottom: "pool1_5_0" 1266 | top: "pool1_5_1" 1267 | name: "pool1_5_1" 1268 | type: "Pooling" 1269 | pooling_param { 1270 | pool: MAX 1271 | kernel_size: 2 1272 | stride: 2 1273 | } 1274 | } 1275 | layer { 1276 | bottom: "conv2_11" 1277 | top: "pool2_11_0" 1278 | name: "pool2_11_0" 1279 | type: "Pooling" 1280 | pooling_param { 1281 | pool: MAX 1282 | kernel_size: 2 1283 | stride: 2 1284 | } 1285 | } 1286 | layer { 1287 | bottom: "pool2_11_0" 1288 | top: "pool2_11_1" 1289 | name: "pool2_11_1" 1290 | type: "Pooling" 1291 | pooling_param { 1292 | pool: MAX 1293 | kernel_size: 2 1294 | stride: 2 1295 | } 1296 | } 1297 | layer { 1298 | name: "join_pool2_11_plus" 1299 | type: "FractalJoin" 1300 | bottom: "pool0_2_1" 1301 | bottom: "pool1_5_1" 1302 | bottom: "pool2_11_1" 1303 | top: "pool2_11_plus" 1304 | fractal_join_param { 1305 | drop_path_ratio: 0.15 1306 | } 1307 | } 1308 | layer { 1309 | name: "join_extra_mid_join" 1310 | type: "FractalJoin" 1311 | bottom: "pool2_7_plus" 1312 | bottom: "pool2_11_plus" 1313 | top: "extra_mid_join" 1314 | fractal_join_param { 1315 | drop_path_ratio: 0.15 1316 | } 1317 | } 1318 | # Reduction: 3, spatial size: 8 1319 | layer { 1320 | bottom: "extra_mid_join" 1321 | top: "conv2_12" 1322 | name: "conv2_12" 1323 | param { 1324 | lr_mult: 1 1325 | decay_mult: 1 1326 | name: "conv2_12" 1327 | } 1328 | param { 1329 | lr_mult: 2 1330 | decay_mult: 0 1331 | name: "conv2_12_b" 1332 | } 1333 | type: "Convolution" 1334 | convolution_param { 1335 | num_output: 256 1336 | pad: 1 1337 | kernel_size: 3 1338 | weight_filler { 1339 | type: "xavier" 1340 | } 1341 | bias_filler { 1342 | type: "constant" 1343 | } 1344 
| } 1345 | } 1346 | layer { 1347 | name: "dropout_conv2_12" 1348 | type: "Dropout" 1349 | bottom: "conv2_12" 1350 | top: "conv2_12" 1351 | dropout_param { 1352 | dropout_ratio: 0.2 1353 | } 1354 | } 1355 | layer { 1356 | name: "batch_conv2_12" 1357 | type: "BatchNorm" 1358 | bottom: "conv2_12" 1359 | top: "conv2_12" 1360 | param { lr_mult: 0 } 1361 | param { lr_mult: 0 } 1362 | param { lr_mult: 0 } 1363 | } 1364 | layer { 1365 | name: "relu_conv2_12" 1366 | type: "ReLU" 1367 | bottom: "conv2_12" 1368 | top: "conv2_12" 1369 | } 1370 | layer { 1371 | bottom: "conv2_12" 1372 | top: "conv2_13" 1373 | name: "conv2_13" 1374 | param { 1375 | lr_mult: 1 1376 | decay_mult: 1 1377 | name: "conv2_13" 1378 | } 1379 | param { 1380 | lr_mult: 2 1381 | decay_mult: 0 1382 | name: "conv2_13_b" 1383 | } 1384 | type: "Convolution" 1385 | convolution_param { 1386 | num_output: 256 1387 | pad: 1 1388 | kernel_size: 3 1389 | weight_filler { 1390 | type: "xavier" 1391 | } 1392 | bias_filler { 1393 | type: "constant" 1394 | } 1395 | } 1396 | } 1397 | layer { 1398 | name: "dropout_conv2_13" 1399 | type: "Dropout" 1400 | bottom: "conv2_13" 1401 | top: "conv2_13" 1402 | dropout_param { 1403 | dropout_ratio: 0.2 1404 | } 1405 | } 1406 | layer { 1407 | name: "batch_conv2_13" 1408 | type: "BatchNorm" 1409 | bottom: "conv2_13" 1410 | top: "conv2_13" 1411 | param { lr_mult: 0 } 1412 | param { lr_mult: 0 } 1413 | param { lr_mult: 0 } 1414 | } 1415 | layer { 1416 | name: "relu_conv2_13" 1417 | type: "ReLU" 1418 | bottom: "conv2_13" 1419 | top: "conv2_13" 1420 | } 1421 | layer { 1422 | bottom: "extra_mid_join" 1423 | top: "conv1_6" 1424 | name: "conv1_6" 1425 | param { 1426 | lr_mult: 1 1427 | decay_mult: 1 1428 | name: "conv1_6" 1429 | } 1430 | param { 1431 | lr_mult: 2 1432 | decay_mult: 0 1433 | name: "conv1_6_b" 1434 | } 1435 | type: "Convolution" 1436 | convolution_param { 1437 | num_output: 256 1438 | pad: 1 1439 | kernel_size: 3 1440 | weight_filler { 1441 | type: "xavier" 1442 | } 1443 | 
bias_filler { 1444 | type: "constant" 1445 | } 1446 | } 1447 | } 1448 | layer { 1449 | name: "dropout_conv1_6" 1450 | type: "Dropout" 1451 | bottom: "conv1_6" 1452 | top: "conv1_6" 1453 | dropout_param { 1454 | dropout_ratio: 0.2 1455 | } 1456 | } 1457 | layer { 1458 | name: "batch_conv1_6" 1459 | type: "BatchNorm" 1460 | bottom: "conv1_6" 1461 | top: "conv1_6" 1462 | param { lr_mult: 0 } 1463 | param { lr_mult: 0 } 1464 | param { lr_mult: 0 } 1465 | } 1466 | layer { 1467 | name: "relu_conv1_6" 1468 | type: "ReLU" 1469 | bottom: "conv1_6" 1470 | top: "conv1_6" 1471 | } 1472 | layer { 1473 | name: "join_conv2_13_plus" 1474 | type: "FractalJoin" 1475 | bottom: "conv1_6" 1476 | bottom: "conv2_13" 1477 | top: "conv2_13_plus" 1478 | fractal_join_param { 1479 | drop_path_ratio: 0.15 1480 | } 1481 | } 1482 | layer { 1483 | bottom: "conv2_13_plus" 1484 | top: "conv2_14" 1485 | name: "conv2_14" 1486 | param { 1487 | lr_mult: 1 1488 | decay_mult: 1 1489 | name: "conv2_14" 1490 | } 1491 | param { 1492 | lr_mult: 2 1493 | decay_mult: 0 1494 | name: "conv2_14_b" 1495 | } 1496 | type: "Convolution" 1497 | convolution_param { 1498 | num_output: 256 1499 | pad: 1 1500 | kernel_size: 3 1501 | weight_filler { 1502 | type: "xavier" 1503 | } 1504 | bias_filler { 1505 | type: "constant" 1506 | } 1507 | } 1508 | } 1509 | layer { 1510 | name: "dropout_conv2_14" 1511 | type: "Dropout" 1512 | bottom: "conv2_14" 1513 | top: "conv2_14" 1514 | dropout_param { 1515 | dropout_ratio: 0.2 1516 | } 1517 | } 1518 | layer { 1519 | name: "batch_conv2_14" 1520 | type: "BatchNorm" 1521 | bottom: "conv2_14" 1522 | top: "conv2_14" 1523 | param { lr_mult: 0 } 1524 | param { lr_mult: 0 } 1525 | param { lr_mult: 0 } 1526 | } 1527 | layer { 1528 | name: "relu_conv2_14" 1529 | type: "ReLU" 1530 | bottom: "conv2_14" 1531 | top: "conv2_14" 1532 | } 1533 | layer { 1534 | bottom: "conv2_14" 1535 | top: "conv2_15" 1536 | name: "conv2_15" 1537 | param { 1538 | lr_mult: 1 1539 | decay_mult: 1 1540 | name: "conv2_15" 
1541 | } 1542 | param { 1543 | lr_mult: 2 1544 | decay_mult: 0 1545 | name: "conv2_15_b" 1546 | } 1547 | type: "Convolution" 1548 | convolution_param { 1549 | num_output: 256 1550 | pad: 1 1551 | kernel_size: 3 1552 | weight_filler { 1553 | type: "xavier" 1554 | } 1555 | bias_filler { 1556 | type: "constant" 1557 | } 1558 | } 1559 | } 1560 | layer { 1561 | name: "dropout_conv2_15" 1562 | type: "Dropout" 1563 | bottom: "conv2_15" 1564 | top: "conv2_15" 1565 | dropout_param { 1566 | dropout_ratio: 0.2 1567 | } 1568 | } 1569 | layer { 1570 | name: "batch_conv2_15" 1571 | type: "BatchNorm" 1572 | bottom: "conv2_15" 1573 | top: "conv2_15" 1574 | param { lr_mult: 0 } 1575 | param { lr_mult: 0 } 1576 | param { lr_mult: 0 } 1577 | } 1578 | layer { 1579 | name: "relu_conv2_15" 1580 | type: "ReLU" 1581 | bottom: "conv2_15" 1582 | top: "conv2_15" 1583 | } 1584 | layer { 1585 | bottom: "conv2_13_plus" 1586 | top: "conv1_7" 1587 | name: "conv1_7" 1588 | param { 1589 | lr_mult: 1 1590 | decay_mult: 1 1591 | name: "conv1_7" 1592 | } 1593 | param { 1594 | lr_mult: 2 1595 | decay_mult: 0 1596 | name: "conv1_7_b" 1597 | } 1598 | type: "Convolution" 1599 | convolution_param { 1600 | num_output: 256 1601 | pad: 1 1602 | kernel_size: 3 1603 | weight_filler { 1604 | type: "xavier" 1605 | } 1606 | bias_filler { 1607 | type: "constant" 1608 | } 1609 | } 1610 | } 1611 | layer { 1612 | name: "dropout_conv1_7" 1613 | type: "Dropout" 1614 | bottom: "conv1_7" 1615 | top: "conv1_7" 1616 | dropout_param { 1617 | dropout_ratio: 0.2 1618 | } 1619 | } 1620 | layer { 1621 | name: "batch_conv1_7" 1622 | type: "BatchNorm" 1623 | bottom: "conv1_7" 1624 | top: "conv1_7" 1625 | param { lr_mult: 0 } 1626 | param { lr_mult: 0 } 1627 | param { lr_mult: 0 } 1628 | } 1629 | layer { 1630 | name: "relu_conv1_7" 1631 | type: "ReLU" 1632 | bottom: "conv1_7" 1633 | top: "conv1_7" 1634 | } 1635 | layer { 1636 | bottom: "extra_mid_join" 1637 | top: "conv0_3" 1638 | name: "conv0_3" 1639 | param { 1640 | lr_mult: 1 
1641 | decay_mult: 1 1642 | name: "conv0_3" 1643 | } 1644 | param { 1645 | lr_mult: 2 1646 | decay_mult: 0 1647 | name: "conv0_3_b" 1648 | } 1649 | type: "Convolution" 1650 | convolution_param { 1651 | num_output: 256 1652 | pad: 1 1653 | kernel_size: 3 1654 | weight_filler { 1655 | type: "xavier" 1656 | } 1657 | bias_filler { 1658 | type: "constant" 1659 | } 1660 | } 1661 | } 1662 | layer { 1663 | name: "dropout_conv0_3" 1664 | type: "Dropout" 1665 | bottom: "conv0_3" 1666 | top: "conv0_3" 1667 | dropout_param { 1668 | dropout_ratio: 0.2 1669 | } 1670 | } 1671 | layer { 1672 | name: "batch_conv0_3" 1673 | type: "BatchNorm" 1674 | bottom: "conv0_3" 1675 | top: "conv0_3" 1676 | param { lr_mult: 0 } 1677 | param { lr_mult: 0 } 1678 | param { lr_mult: 0 } 1679 | } 1680 | layer { 1681 | name: "relu_conv0_3" 1682 | type: "ReLU" 1683 | bottom: "conv0_3" 1684 | top: "conv0_3" 1685 | } 1686 | layer { 1687 | bottom: "conv0_3" 1688 | top: "pool0_3" 1689 | name: "pool0_3" 1690 | type: "Pooling" 1691 | pooling_param { 1692 | pool: MAX 1693 | kernel_size: 2 1694 | stride: 2 1695 | } 1696 | } 1697 | layer { 1698 | bottom: "conv1_7" 1699 | top: "pool1_7" 1700 | name: "pool1_7" 1701 | type: "Pooling" 1702 | pooling_param { 1703 | pool: MAX 1704 | kernel_size: 2 1705 | stride: 2 1706 | } 1707 | } 1708 | layer { 1709 | bottom: "conv2_15" 1710 | top: "pool2_15" 1711 | name: "pool2_15" 1712 | type: "Pooling" 1713 | pooling_param { 1714 | pool: MAX 1715 | kernel_size: 2 1716 | stride: 2 1717 | } 1718 | } 1719 | layer { 1720 | name: "join_pool2_15_plus" 1721 | type: "FractalJoin" 1722 | bottom: "pool0_3" 1723 | bottom: "pool1_7" 1724 | bottom: "pool2_15" 1725 | top: "pool2_15_plus" 1726 | fractal_join_param { 1727 | drop_path_ratio: 0.15 1728 | } 1729 | } 1730 | # Reduction: 4, spatial size: 4 1731 | layer { 1732 | bottom: "pool2_15_plus" 1733 | top: "conv2_16" 1734 | name: "conv2_16" 1735 | param { 1736 | lr_mult: 1 1737 | decay_mult: 1 1738 | name: "conv2_16" 1739 | } 1740 | param { 
1741 | lr_mult: 2 1742 | decay_mult: 0 1743 | name: "conv2_16_b" 1744 | } 1745 | type: "Convolution" 1746 | convolution_param { 1747 | num_output: 512 1748 | pad: 1 1749 | kernel_size: 3 1750 | weight_filler { 1751 | type: "xavier" 1752 | } 1753 | bias_filler { 1754 | type: "constant" 1755 | } 1756 | } 1757 | } 1758 | layer { 1759 | name: "dropout_conv2_16" 1760 | type: "Dropout" 1761 | bottom: "conv2_16" 1762 | top: "conv2_16" 1763 | dropout_param { 1764 | dropout_ratio: 0.3 1765 | } 1766 | } 1767 | layer { 1768 | name: "batch_conv2_16" 1769 | type: "BatchNorm" 1770 | bottom: "conv2_16" 1771 | top: "conv2_16" 1772 | param { lr_mult: 0 } 1773 | param { lr_mult: 0 } 1774 | param { lr_mult: 0 } 1775 | } 1776 | layer { 1777 | name: "relu_conv2_16" 1778 | type: "ReLU" 1779 | bottom: "conv2_16" 1780 | top: "conv2_16" 1781 | } 1782 | layer { 1783 | bottom: "conv2_16" 1784 | top: "conv2_17" 1785 | name: "conv2_17" 1786 | param { 1787 | lr_mult: 1 1788 | decay_mult: 1 1789 | name: "conv2_17" 1790 | } 1791 | param { 1792 | lr_mult: 2 1793 | decay_mult: 0 1794 | name: "conv2_17_b" 1795 | } 1796 | type: "Convolution" 1797 | convolution_param { 1798 | num_output: 512 1799 | pad: 1 1800 | kernel_size: 3 1801 | weight_filler { 1802 | type: "xavier" 1803 | } 1804 | bias_filler { 1805 | type: "constant" 1806 | } 1807 | } 1808 | } 1809 | layer { 1810 | name: "dropout_conv2_17" 1811 | type: "Dropout" 1812 | bottom: "conv2_17" 1813 | top: "conv2_17" 1814 | dropout_param { 1815 | dropout_ratio: 0.3 1816 | } 1817 | } 1818 | layer { 1819 | name: "batch_conv2_17" 1820 | type: "BatchNorm" 1821 | bottom: "conv2_17" 1822 | top: "conv2_17" 1823 | param { lr_mult: 0 } 1824 | param { lr_mult: 0 } 1825 | param { lr_mult: 0 } 1826 | } 1827 | layer { 1828 | name: "relu_conv2_17" 1829 | type: "ReLU" 1830 | bottom: "conv2_17" 1831 | top: "conv2_17" 1832 | } 1833 | layer { 1834 | bottom: "pool2_15_plus" 1835 | top: "conv1_8" 1836 | name: "conv1_8" 1837 | param { 1838 | lr_mult: 1 1839 | decay_mult: 
1 1840 | name: "conv1_8" 1841 | } 1842 | param { 1843 | lr_mult: 2 1844 | decay_mult: 0 1845 | name: "conv1_8_b" 1846 | } 1847 | type: "Convolution" 1848 | convolution_param { 1849 | num_output: 512 1850 | pad: 1 1851 | kernel_size: 3 1852 | weight_filler { 1853 | type: "xavier" 1854 | } 1855 | bias_filler { 1856 | type: "constant" 1857 | } 1858 | } 1859 | } 1860 | layer { 1861 | name: "dropout_conv1_8" 1862 | type: "Dropout" 1863 | bottom: "conv1_8" 1864 | top: "conv1_8" 1865 | dropout_param { 1866 | dropout_ratio: 0.3 1867 | } 1868 | } 1869 | layer { 1870 | name: "batch_conv1_8" 1871 | type: "BatchNorm" 1872 | bottom: "conv1_8" 1873 | top: "conv1_8" 1874 | param { lr_mult: 0 } 1875 | param { lr_mult: 0 } 1876 | param { lr_mult: 0 } 1877 | } 1878 | layer { 1879 | name: "relu_conv1_8" 1880 | type: "ReLU" 1881 | bottom: "conv1_8" 1882 | top: "conv1_8" 1883 | } 1884 | layer { 1885 | name: "join_conv2_17_plus" 1886 | type: "FractalJoin" 1887 | bottom: "conv1_8" 1888 | bottom: "conv2_17" 1889 | top: "conv2_17_plus" 1890 | fractal_join_param { 1891 | drop_path_ratio: 0.15 1892 | } 1893 | } 1894 | layer { 1895 | bottom: "conv2_17_plus" 1896 | top: "conv2_18" 1897 | name: "conv2_18" 1898 | param { 1899 | lr_mult: 1 1900 | decay_mult: 1 1901 | name: "conv2_18" 1902 | } 1903 | param { 1904 | lr_mult: 2 1905 | decay_mult: 0 1906 | name: "conv2_18_b" 1907 | } 1908 | type: "Convolution" 1909 | convolution_param { 1910 | num_output: 512 1911 | pad: 1 1912 | kernel_size: 3 1913 | weight_filler { 1914 | type: "xavier" 1915 | } 1916 | bias_filler { 1917 | type: "constant" 1918 | } 1919 | } 1920 | } 1921 | layer { 1922 | name: "dropout_conv2_18" 1923 | type: "Dropout" 1924 | bottom: "conv2_18" 1925 | top: "conv2_18" 1926 | dropout_param { 1927 | dropout_ratio: 0.3 1928 | } 1929 | } 1930 | layer { 1931 | name: "batch_conv2_18" 1932 | type: "BatchNorm" 1933 | bottom: "conv2_18" 1934 | top: "conv2_18" 1935 | param { lr_mult: 0 } 1936 | param { lr_mult: 0 } 1937 | param { lr_mult: 0 } 
1938 | } 1939 | layer { 1940 | name: "relu_conv2_18" 1941 | type: "ReLU" 1942 | bottom: "conv2_18" 1943 | top: "conv2_18" 1944 | } 1945 | layer { 1946 | bottom: "conv2_18" 1947 | top: "conv2_19" 1948 | name: "conv2_19" 1949 | param { 1950 | lr_mult: 1 1951 | decay_mult: 1 1952 | name: "conv2_19" 1953 | } 1954 | param { 1955 | lr_mult: 2 1956 | decay_mult: 0 1957 | name: "conv2_19_b" 1958 | } 1959 | type: "Convolution" 1960 | convolution_param { 1961 | num_output: 512 1962 | pad: 1 1963 | kernel_size: 3 1964 | weight_filler { 1965 | type: "xavier" 1966 | } 1967 | bias_filler { 1968 | type: "constant" 1969 | } 1970 | } 1971 | } 1972 | layer { 1973 | name: "dropout_conv2_19" 1974 | type: "Dropout" 1975 | bottom: "conv2_19" 1976 | top: "conv2_19" 1977 | dropout_param { 1978 | dropout_ratio: 0.3 1979 | } 1980 | } 1981 | layer { 1982 | name: "batch_conv2_19" 1983 | type: "BatchNorm" 1984 | bottom: "conv2_19" 1985 | top: "conv2_19" 1986 | param { lr_mult: 0 } 1987 | param { lr_mult: 0 } 1988 | param { lr_mult: 0 } 1989 | } 1990 | layer { 1991 | name: "relu_conv2_19" 1992 | type: "ReLU" 1993 | bottom: "conv2_19" 1994 | top: "conv2_19" 1995 | } 1996 | layer { 1997 | bottom: "conv2_17_plus" 1998 | top: "conv1_9" 1999 | name: "conv1_9" 2000 | param { 2001 | lr_mult: 1 2002 | decay_mult: 1 2003 | name: "conv1_9" 2004 | } 2005 | param { 2006 | lr_mult: 2 2007 | decay_mult: 0 2008 | name: "conv1_9_b" 2009 | } 2010 | type: "Convolution" 2011 | convolution_param { 2012 | num_output: 512 2013 | pad: 1 2014 | kernel_size: 3 2015 | weight_filler { 2016 | type: "xavier" 2017 | } 2018 | bias_filler { 2019 | type: "constant" 2020 | } 2021 | } 2022 | } 2023 | layer { 2024 | name: "dropout_conv1_9" 2025 | type: "Dropout" 2026 | bottom: "conv1_9" 2027 | top: "conv1_9" 2028 | dropout_param { 2029 | dropout_ratio: 0.3 2030 | } 2031 | } 2032 | layer { 2033 | name: "batch_conv1_9" 2034 | type: "BatchNorm" 2035 | bottom: "conv1_9" 2036 | top: "conv1_9" 2037 | param { lr_mult: 0 } 2038 | param { 
lr_mult: 0 } 2039 | param { lr_mult: 0 } 2040 | } 2041 | layer { 2042 | name: "relu_conv1_9" 2043 | type: "ReLU" 2044 | bottom: "conv1_9" 2045 | top: "conv1_9" 2046 | } 2047 | layer { 2048 | bottom: "pool2_15_plus" 2049 | top: "conv0_4" 2050 | name: "conv0_4" 2051 | param { 2052 | lr_mult: 1 2053 | decay_mult: 1 2054 | name: "conv0_4" 2055 | } 2056 | param { 2057 | lr_mult: 2 2058 | decay_mult: 0 2059 | name: "conv0_4_b" 2060 | } 2061 | type: "Convolution" 2062 | convolution_param { 2063 | num_output: 512 2064 | pad: 1 2065 | kernel_size: 3 2066 | weight_filler { 2067 | type: "xavier" 2068 | } 2069 | bias_filler { 2070 | type: "constant" 2071 | } 2072 | } 2073 | } 2074 | layer { 2075 | name: "dropout_conv0_4" 2076 | type: "Dropout" 2077 | bottom: "conv0_4" 2078 | top: "conv0_4" 2079 | dropout_param { 2080 | dropout_ratio: 0.3 2081 | } 2082 | } 2083 | layer { 2084 | name: "batch_conv0_4" 2085 | type: "BatchNorm" 2086 | bottom: "conv0_4" 2087 | top: "conv0_4" 2088 | param { lr_mult: 0 } 2089 | param { lr_mult: 0 } 2090 | param { lr_mult: 0 } 2091 | } 2092 | layer { 2093 | name: "relu_conv0_4" 2094 | type: "ReLU" 2095 | bottom: "conv0_4" 2096 | top: "conv0_4" 2097 | } 2098 | layer { 2099 | bottom: "conv0_4" 2100 | top: "pool0_4" 2101 | name: "pool0_4" 2102 | type: "Pooling" 2103 | pooling_param { 2104 | pool: MAX 2105 | kernel_size: 2 2106 | stride: 2 2107 | } 2108 | } 2109 | layer { 2110 | bottom: "conv1_9" 2111 | top: "pool1_9" 2112 | name: "pool1_9" 2113 | type: "Pooling" 2114 | pooling_param { 2115 | pool: MAX 2116 | kernel_size: 2 2117 | stride: 2 2118 | } 2119 | } 2120 | layer { 2121 | bottom: "conv2_19" 2122 | top: "pool2_19" 2123 | name: "pool2_19" 2124 | type: "Pooling" 2125 | pooling_param { 2126 | pool: MAX 2127 | kernel_size: 2 2128 | stride: 2 2129 | } 2130 | } 2131 | layer { 2132 | name: "join_pool2_19_plus" 2133 | type: "FractalJoin" 2134 | bottom: "pool0_4" 2135 | bottom: "pool1_9" 2136 | bottom: "pool2_19" 2137 | top: "pool2_19_plus" 2138 | 
fractal_join_param { 2139 | drop_path_ratio: 0.15 2140 | } 2141 | } 2142 | # Reduction: 5, spatial size: 1 2143 | layer { 2144 | bottom: "extra_mid_join" 2145 | top: "conv2_20" 2146 | name: "conv2_20" 2147 | param { 2148 | lr_mult: 1 2149 | decay_mult: 1 2150 | name: "conv2_20" 2151 | } 2152 | param { 2153 | lr_mult: 2 2154 | decay_mult: 0 2155 | name: "conv2_20_b" 2156 | } 2157 | type: "Convolution" 2158 | convolution_param { 2159 | num_output: 512 2160 | pad: 1 2161 | kernel_size: 3 2162 | weight_filler { 2163 | type: "xavier" 2164 | } 2165 | bias_filler { 2166 | type: "constant" 2167 | } 2168 | } 2169 | } 2170 | layer { 2171 | name: "dropout_conv2_20" 2172 | type: "Dropout" 2173 | bottom: "conv2_20" 2174 | top: "conv2_20" 2175 | dropout_param { 2176 | dropout_ratio: 0.2 2177 | } 2178 | } 2179 | layer { 2180 | name: "batch_conv2_20" 2181 | type: "BatchNorm" 2182 | bottom: "conv2_20" 2183 | top: "conv2_20" 2184 | param { lr_mult: 0 } 2185 | param { lr_mult: 0 } 2186 | param { lr_mult: 0 } 2187 | } 2188 | layer { 2189 | name: "relu_conv2_20" 2190 | type: "ReLU" 2191 | bottom: "conv2_20" 2192 | top: "conv2_20" 2193 | } 2194 | layer { 2195 | bottom: "conv2_20" 2196 | top: "conv2_21" 2197 | name: "conv2_21" 2198 | param { 2199 | lr_mult: 1 2200 | decay_mult: 1 2201 | name: "conv2_21" 2202 | } 2203 | param { 2204 | lr_mult: 2 2205 | decay_mult: 0 2206 | name: "conv2_21_b" 2207 | } 2208 | type: "Convolution" 2209 | convolution_param { 2210 | num_output: 512 2211 | pad: 1 2212 | kernel_size: 3 2213 | weight_filler { 2214 | type: "xavier" 2215 | } 2216 | bias_filler { 2217 | type: "constant" 2218 | } 2219 | } 2220 | } 2221 | layer { 2222 | name: "dropout_conv2_21" 2223 | type: "Dropout" 2224 | bottom: "conv2_21" 2225 | top: "conv2_21" 2226 | dropout_param { 2227 | dropout_ratio: 0.2 2228 | } 2229 | } 2230 | layer { 2231 | name: "batch_conv2_21" 2232 | type: "BatchNorm" 2233 | bottom: "conv2_21" 2234 | top: "conv2_21" 2235 | param { lr_mult: 0 } 2236 | param { lr_mult: 0 } 
2237 | param { lr_mult: 0 } 2238 | } 2239 | layer { 2240 | name: "relu_conv2_21" 2241 | type: "ReLU" 2242 | bottom: "conv2_21" 2243 | top: "conv2_21" 2244 | } 2245 | layer { 2246 | bottom: "extra_mid_join" 2247 | top: "conv1_10" 2248 | name: "conv1_10" 2249 | param { 2250 | lr_mult: 1 2251 | decay_mult: 1 2252 | name: "conv1_10" 2253 | } 2254 | param { 2255 | lr_mult: 2 2256 | decay_mult: 0 2257 | name: "conv1_10_b" 2258 | } 2259 | type: "Convolution" 2260 | convolution_param { 2261 | num_output: 512 2262 | pad: 1 2263 | kernel_size: 3 2264 | weight_filler { 2265 | type: "xavier" 2266 | } 2267 | bias_filler { 2268 | type: "constant" 2269 | } 2270 | } 2271 | } 2272 | layer { 2273 | name: "dropout_conv1_10" 2274 | type: "Dropout" 2275 | bottom: "conv1_10" 2276 | top: "conv1_10" 2277 | dropout_param { 2278 | dropout_ratio: 0.2 2279 | } 2280 | } 2281 | layer { 2282 | name: "batch_conv1_10" 2283 | type: "BatchNorm" 2284 | bottom: "conv1_10" 2285 | top: "conv1_10" 2286 | param { lr_mult: 0 } 2287 | param { lr_mult: 0 } 2288 | param { lr_mult: 0 } 2289 | } 2290 | layer { 2291 | name: "relu_conv1_10" 2292 | type: "ReLU" 2293 | bottom: "conv1_10" 2294 | top: "conv1_10" 2295 | } 2296 | layer { 2297 | name: "join_conv2_21_plus" 2298 | type: "FractalJoin" 2299 | bottom: "conv1_10" 2300 | bottom: "conv2_21" 2301 | top: "conv2_21_plus" 2302 | fractal_join_param { 2303 | drop_path_ratio: 0.15 2304 | } 2305 | } 2306 | layer { 2307 | bottom: "conv2_21_plus" 2308 | top: "conv2_22" 2309 | name: "conv2_22" 2310 | param { 2311 | lr_mult: 1 2312 | decay_mult: 1 2313 | name: "conv2_22" 2314 | } 2315 | param { 2316 | lr_mult: 2 2317 | decay_mult: 0 2318 | name: "conv2_22_b" 2319 | } 2320 | type: "Convolution" 2321 | convolution_param { 2322 | num_output: 512 2323 | pad: 1 2324 | kernel_size: 3 2325 | weight_filler { 2326 | type: "xavier" 2327 | } 2328 | bias_filler { 2329 | type: "constant" 2330 | } 2331 | } 2332 | } 2333 | layer { 2334 | name: "dropout_conv2_22" 2335 | type: "Dropout" 
2336 | bottom: "conv2_22" 2337 | top: "conv2_22" 2338 | dropout_param { 2339 | dropout_ratio: 0.2 2340 | } 2341 | } 2342 | layer { 2343 | name: "batch_conv2_22" 2344 | type: "BatchNorm" 2345 | bottom: "conv2_22" 2346 | top: "conv2_22" 2347 | param { lr_mult: 0 } 2348 | param { lr_mult: 0 } 2349 | param { lr_mult: 0 } 2350 | } 2351 | layer { 2352 | name: "relu_conv2_22" 2353 | type: "ReLU" 2354 | bottom: "conv2_22" 2355 | top: "conv2_22" 2356 | } 2357 | layer { 2358 | bottom: "conv2_22" 2359 | top: "conv2_23" 2360 | name: "conv2_23" 2361 | param { 2362 | lr_mult: 1 2363 | decay_mult: 1 2364 | name: "conv2_23" 2365 | } 2366 | param { 2367 | lr_mult: 2 2368 | decay_mult: 0 2369 | name: "conv2_23_b" 2370 | } 2371 | type: "Convolution" 2372 | convolution_param { 2373 | num_output: 512 2374 | pad: 1 2375 | kernel_size: 3 2376 | weight_filler { 2377 | type: "xavier" 2378 | } 2379 | bias_filler { 2380 | type: "constant" 2381 | } 2382 | } 2383 | } 2384 | layer { 2385 | name: "dropout_conv2_23" 2386 | type: "Dropout" 2387 | bottom: "conv2_23" 2388 | top: "conv2_23" 2389 | dropout_param { 2390 | dropout_ratio: 0.2 2391 | } 2392 | } 2393 | layer { 2394 | name: "batch_conv2_23" 2395 | type: "BatchNorm" 2396 | bottom: "conv2_23" 2397 | top: "conv2_23" 2398 | param { lr_mult: 0 } 2399 | param { lr_mult: 0 } 2400 | param { lr_mult: 0 } 2401 | } 2402 | layer { 2403 | name: "relu_conv2_23" 2404 | type: "ReLU" 2405 | bottom: "conv2_23" 2406 | top: "conv2_23" 2407 | } 2408 | layer { 2409 | bottom: "conv2_21_plus" 2410 | top: "conv1_11" 2411 | name: "conv1_11" 2412 | param { 2413 | lr_mult: 1 2414 | decay_mult: 1 2415 | name: "conv1_11" 2416 | } 2417 | param { 2418 | lr_mult: 2 2419 | decay_mult: 0 2420 | name: "conv1_11_b" 2421 | } 2422 | type: "Convolution" 2423 | convolution_param { 2424 | num_output: 512 2425 | pad: 1 2426 | kernel_size: 3 2427 | weight_filler { 2428 | type: "xavier" 2429 | } 2430 | bias_filler { 2431 | type: "constant" 2432 | } 2433 | } 2434 | } 2435 | layer { 
2436 | name: "dropout_conv1_11" 2437 | type: "Dropout" 2438 | bottom: "conv1_11" 2439 | top: "conv1_11" 2440 | dropout_param { 2441 | dropout_ratio: 0.2 2442 | } 2443 | } 2444 | layer { 2445 | name: "batch_conv1_11" 2446 | type: "BatchNorm" 2447 | bottom: "conv1_11" 2448 | top: "conv1_11" 2449 | param { lr_mult: 0 } 2450 | param { lr_mult: 0 } 2451 | param { lr_mult: 0 } 2452 | } 2453 | layer { 2454 | name: "relu_conv1_11" 2455 | type: "ReLU" 2456 | bottom: "conv1_11" 2457 | top: "conv1_11" 2458 | } 2459 | layer { 2460 | bottom: "extra_mid_join" 2461 | top: "conv0_5" 2462 | name: "conv0_5" 2463 | param { 2464 | lr_mult: 1 2465 | decay_mult: 1 2466 | name: "conv0_5" 2467 | } 2468 | param { 2469 | lr_mult: 2 2470 | decay_mult: 0 2471 | name: "conv0_5_b" 2472 | } 2473 | type: "Convolution" 2474 | convolution_param { 2475 | num_output: 512 2476 | pad: 1 2477 | kernel_size: 3 2478 | weight_filler { 2479 | type: "xavier" 2480 | } 2481 | bias_filler { 2482 | type: "constant" 2483 | } 2484 | } 2485 | } 2486 | layer { 2487 | name: "dropout_conv0_5" 2488 | type: "Dropout" 2489 | bottom: "conv0_5" 2490 | top: "conv0_5" 2491 | dropout_param { 2492 | dropout_ratio: 0.2 2493 | } 2494 | } 2495 | layer { 2496 | name: "batch_conv0_5" 2497 | type: "BatchNorm" 2498 | bottom: "conv0_5" 2499 | top: "conv0_5" 2500 | param { lr_mult: 0 } 2501 | param { lr_mult: 0 } 2502 | param { lr_mult: 0 } 2503 | } 2504 | layer { 2505 | name: "relu_conv0_5" 2506 | type: "ReLU" 2507 | bottom: "conv0_5" 2508 | top: "conv0_5" 2509 | } 2510 | layer { 2511 | bottom: "conv0_5" 2512 | top: "pool0_5_0" 2513 | name: "pool0_5_0" 2514 | type: "Pooling" 2515 | pooling_param { 2516 | pool: MAX 2517 | kernel_size: 2 2518 | stride: 2 2519 | } 2520 | } 2521 | layer { 2522 | bottom: "pool0_5_0" 2523 | top: "pool0_5_1" 2524 | name: "pool0_5_1" 2525 | type: "Pooling" 2526 | pooling_param { 2527 | pool: MAX 2528 | kernel_size: 2 2529 | stride: 2 2530 | } 2531 | } 2532 | layer { 2533 | bottom: "conv1_11" 2534 | top: 
"pool1_11_0" 2535 | name: "pool1_11_0" 2536 | type: "Pooling" 2537 | pooling_param { 2538 | pool: MAX 2539 | kernel_size: 2 2540 | stride: 2 2541 | } 2542 | } 2543 | layer { 2544 | bottom: "pool1_11_0" 2545 | top: "pool1_11_1" 2546 | name: "pool1_11_1" 2547 | type: "Pooling" 2548 | pooling_param { 2549 | pool: MAX 2550 | kernel_size: 2 2551 | stride: 2 2552 | } 2553 | } 2554 | layer { 2555 | bottom: "conv2_23" 2556 | top: "pool2_23_0" 2557 | name: "pool2_23_0" 2558 | type: "Pooling" 2559 | pooling_param { 2560 | pool: MAX 2561 | kernel_size: 2 2562 | stride: 2 2563 | } 2564 | } 2565 | layer { 2566 | bottom: "pool2_23_0" 2567 | top: "pool2_23_1" 2568 | name: "pool2_23_1" 2569 | type: "Pooling" 2570 | pooling_param { 2571 | pool: MAX 2572 | kernel_size: 2 2573 | stride: 2 2574 | } 2575 | } 2576 | layer { 2577 | name: "join_pool2_23_plus" 2578 | type: "FractalJoin" 2579 | bottom: "pool0_5_1" 2580 | bottom: "pool1_11_1" 2581 | bottom: "pool2_23_1" 2582 | top: "pool2_23_plus" 2583 | fractal_join_param { 2584 | drop_path_ratio: 0.15 2585 | } 2586 | } 2587 | # Reduction: 6, spatial size: 0 2588 | layer { 2589 | bottom: "data" 2590 | top: "conv2_24" 2591 | name: "conv2_24" 2592 | param { 2593 | lr_mult: 1 2594 | decay_mult: 1 2595 | name: "conv2_24" 2596 | } 2597 | param { 2598 | lr_mult: 2 2599 | decay_mult: 0 2600 | name: "conv2_24_b" 2601 | } 2602 | type: "Convolution" 2603 | convolution_param { 2604 | num_output: 512 2605 | pad: 1 2606 | kernel_size: 3 2607 | weight_filler { 2608 | type: "xavier" 2609 | } 2610 | bias_filler { 2611 | type: "constant" 2612 | } 2613 | } 2614 | } 2615 | layer { 2616 | name: "dropout_conv2_24" 2617 | type: "Dropout" 2618 | bottom: "conv2_24" 2619 | top: "conv2_24" 2620 | dropout_param { 2621 | dropout_ratio: 0.0 2622 | } 2623 | } 2624 | layer { 2625 | name: "batch_conv2_24" 2626 | type: "BatchNorm" 2627 | bottom: "conv2_24" 2628 | top: "conv2_24" 2629 | param { lr_mult: 0 } 2630 | param { lr_mult: 0 } 2631 | param { lr_mult: 0 } 2632 | } 
2633 | layer { 2634 | name: "relu_conv2_24" 2635 | type: "ReLU" 2636 | bottom: "conv2_24" 2637 | top: "conv2_24" 2638 | } 2639 | layer { 2640 | bottom: "conv2_24" 2641 | top: "conv2_25" 2642 | name: "conv2_25" 2643 | param { 2644 | lr_mult: 1 2645 | decay_mult: 1 2646 | name: "conv2_25" 2647 | } 2648 | param { 2649 | lr_mult: 2 2650 | decay_mult: 0 2651 | name: "conv2_25_b" 2652 | } 2653 | type: "Convolution" 2654 | convolution_param { 2655 | num_output: 512 2656 | pad: 1 2657 | kernel_size: 3 2658 | weight_filler { 2659 | type: "xavier" 2660 | } 2661 | bias_filler { 2662 | type: "constant" 2663 | } 2664 | } 2665 | } 2666 | layer { 2667 | name: "dropout_conv2_25" 2668 | type: "Dropout" 2669 | bottom: "conv2_25" 2670 | top: "conv2_25" 2671 | dropout_param { 2672 | dropout_ratio: 0.0 2673 | } 2674 | } 2675 | layer { 2676 | name: "batch_conv2_25" 2677 | type: "BatchNorm" 2678 | bottom: "conv2_25" 2679 | top: "conv2_25" 2680 | param { lr_mult: 0 } 2681 | param { lr_mult: 0 } 2682 | param { lr_mult: 0 } 2683 | } 2684 | layer { 2685 | name: "relu_conv2_25" 2686 | type: "ReLU" 2687 | bottom: "conv2_25" 2688 | top: "conv2_25" 2689 | } 2690 | layer { 2691 | bottom: "data" 2692 | top: "conv1_12" 2693 | name: "conv1_12" 2694 | param { 2695 | lr_mult: 1 2696 | decay_mult: 1 2697 | name: "conv1_12" 2698 | } 2699 | param { 2700 | lr_mult: 2 2701 | decay_mult: 0 2702 | name: "conv1_12_b" 2703 | } 2704 | type: "Convolution" 2705 | convolution_param { 2706 | num_output: 512 2707 | pad: 1 2708 | kernel_size: 3 2709 | weight_filler { 2710 | type: "xavier" 2711 | } 2712 | bias_filler { 2713 | type: "constant" 2714 | } 2715 | } 2716 | } 2717 | layer { 2718 | name: "dropout_conv1_12" 2719 | type: "Dropout" 2720 | bottom: "conv1_12" 2721 | top: "conv1_12" 2722 | dropout_param { 2723 | dropout_ratio: 0.0 2724 | } 2725 | } 2726 | layer { 2727 | name: "batch_conv1_12" 2728 | type: "BatchNorm" 2729 | bottom: "conv1_12" 2730 | top: "conv1_12" 2731 | param { lr_mult: 0 } 2732 | param { 
lr_mult: 0 } 2733 | param { lr_mult: 0 } 2734 | } 2735 | layer { 2736 | name: "relu_conv1_12" 2737 | type: "ReLU" 2738 | bottom: "conv1_12" 2739 | top: "conv1_12" 2740 | } 2741 | layer { 2742 | name: "join_conv2_25_plus" 2743 | type: "FractalJoin" 2744 | bottom: "conv1_12" 2745 | bottom: "conv2_25" 2746 | top: "conv2_25_plus" 2747 | fractal_join_param { 2748 | drop_path_ratio: 0.15 2749 | } 2750 | } 2751 | layer { 2752 | bottom: "conv2_25_plus" 2753 | top: "conv2_26" 2754 | name: "conv2_26" 2755 | param { 2756 | lr_mult: 1 2757 | decay_mult: 1 2758 | name: "conv2_26" 2759 | } 2760 | param { 2761 | lr_mult: 2 2762 | decay_mult: 0 2763 | name: "conv2_26_b" 2764 | } 2765 | type: "Convolution" 2766 | convolution_param { 2767 | num_output: 512 2768 | pad: 1 2769 | kernel_size: 3 2770 | weight_filler { 2771 | type: "xavier" 2772 | } 2773 | bias_filler { 2774 | type: "constant" 2775 | } 2776 | } 2777 | } 2778 | layer { 2779 | name: "dropout_conv2_26" 2780 | type: "Dropout" 2781 | bottom: "conv2_26" 2782 | top: "conv2_26" 2783 | dropout_param { 2784 | dropout_ratio: 0.0 2785 | } 2786 | } 2787 | layer { 2788 | name: "batch_conv2_26" 2789 | type: "BatchNorm" 2790 | bottom: "conv2_26" 2791 | top: "conv2_26" 2792 | param { lr_mult: 0 } 2793 | param { lr_mult: 0 } 2794 | param { lr_mult: 0 } 2795 | } 2796 | layer { 2797 | name: "relu_conv2_26" 2798 | type: "ReLU" 2799 | bottom: "conv2_26" 2800 | top: "conv2_26" 2801 | } 2802 | layer { 2803 | bottom: "conv2_26" 2804 | top: "conv2_27" 2805 | name: "conv2_27" 2806 | param { 2807 | lr_mult: 1 2808 | decay_mult: 1 2809 | name: "conv2_27" 2810 | } 2811 | param { 2812 | lr_mult: 2 2813 | decay_mult: 0 2814 | name: "conv2_27_b" 2815 | } 2816 | type: "Convolution" 2817 | convolution_param { 2818 | num_output: 512 2819 | pad: 1 2820 | kernel_size: 3 2821 | weight_filler { 2822 | type: "xavier" 2823 | } 2824 | bias_filler { 2825 | type: "constant" 2826 | } 2827 | } 2828 | } 2829 | layer { 2830 | name: "dropout_conv2_27" 2831 | type: 
"Dropout" 2832 | bottom: "conv2_27" 2833 | top: "conv2_27" 2834 | dropout_param { 2835 | dropout_ratio: 0.0 2836 | } 2837 | } 2838 | layer { 2839 | name: "batch_conv2_27" 2840 | type: "BatchNorm" 2841 | bottom: "conv2_27" 2842 | top: "conv2_27" 2843 | param { lr_mult: 0 } 2844 | param { lr_mult: 0 } 2845 | param { lr_mult: 0 } 2846 | } 2847 | layer { 2848 | name: "relu_conv2_27" 2849 | type: "ReLU" 2850 | bottom: "conv2_27" 2851 | top: "conv2_27" 2852 | } 2853 | layer { 2854 | bottom: "conv2_25_plus" 2855 | top: "conv1_13" 2856 | name: "conv1_13" 2857 | param { 2858 | lr_mult: 1 2859 | decay_mult: 1 2860 | name: "conv1_13" 2861 | } 2862 | param { 2863 | lr_mult: 2 2864 | decay_mult: 0 2865 | name: "conv1_13_b" 2866 | } 2867 | type: "Convolution" 2868 | convolution_param { 2869 | num_output: 512 2870 | pad: 1 2871 | kernel_size: 3 2872 | weight_filler { 2873 | type: "xavier" 2874 | } 2875 | bias_filler { 2876 | type: "constant" 2877 | } 2878 | } 2879 | } 2880 | layer { 2881 | name: "dropout_conv1_13" 2882 | type: "Dropout" 2883 | bottom: "conv1_13" 2884 | top: "conv1_13" 2885 | dropout_param { 2886 | dropout_ratio: 0.0 2887 | } 2888 | } 2889 | layer { 2890 | name: "batch_conv1_13" 2891 | type: "BatchNorm" 2892 | bottom: "conv1_13" 2893 | top: "conv1_13" 2894 | param { lr_mult: 0 } 2895 | param { lr_mult: 0 } 2896 | param { lr_mult: 0 } 2897 | } 2898 | layer { 2899 | name: "relu_conv1_13" 2900 | type: "ReLU" 2901 | bottom: "conv1_13" 2902 | top: "conv1_13" 2903 | } 2904 | layer { 2905 | bottom: "data" 2906 | top: "conv0_6" 2907 | name: "conv0_6" 2908 | param { 2909 | lr_mult: 1 2910 | decay_mult: 1 2911 | name: "conv0_6" 2912 | } 2913 | param { 2914 | lr_mult: 2 2915 | decay_mult: 0 2916 | name: "conv0_6_b" 2917 | } 2918 | type: "Convolution" 2919 | convolution_param { 2920 | num_output: 512 2921 | pad: 1 2922 | kernel_size: 3 2923 | weight_filler { 2924 | type: "xavier" 2925 | } 2926 | bias_filler { 2927 | type: "constant" 2928 | } 2929 | } 2930 | } 2931 | layer { 
2932 | name: "dropout_conv0_6" 2933 | type: "Dropout" 2934 | bottom: "conv0_6" 2935 | top: "conv0_6" 2936 | dropout_param { 2937 | dropout_ratio: 0.0 2938 | } 2939 | } 2940 | layer { 2941 | name: "batch_conv0_6" 2942 | type: "BatchNorm" 2943 | bottom: "conv0_6" 2944 | top: "conv0_6" 2945 | param { lr_mult: 0 } 2946 | param { lr_mult: 0 } 2947 | param { lr_mult: 0 } 2948 | } 2949 | layer { 2950 | name: "relu_conv0_6" 2951 | type: "ReLU" 2952 | bottom: "conv0_6" 2953 | top: "conv0_6" 2954 | } 2955 | layer { 2956 | bottom: "conv0_6" 2957 | top: "pool0_6_0" 2958 | name: "pool0_6_0" 2959 | type: "Pooling" 2960 | pooling_param { 2961 | pool: MAX 2962 | kernel_size: 2 2963 | stride: 2 2964 | } 2965 | } 2966 | layer { 2967 | bottom: "pool0_6_0" 2968 | top: "pool0_6_1" 2969 | name: "pool0_6_1" 2970 | type: "Pooling" 2971 | pooling_param { 2972 | pool: MAX 2973 | kernel_size: 2 2974 | stride: 2 2975 | } 2976 | } 2977 | layer { 2978 | bottom: "pool0_6_1" 2979 | top: "pool0_6_2" 2980 | name: "pool0_6_2" 2981 | type: "Pooling" 2982 | pooling_param { 2983 | pool: MAX 2984 | kernel_size: 2 2985 | stride: 2 2986 | } 2987 | } 2988 | layer { 2989 | bottom: "pool0_6_2" 2990 | top: "pool0_6_3" 2991 | name: "pool0_6_3" 2992 | type: "Pooling" 2993 | pooling_param { 2994 | pool: MAX 2995 | kernel_size: 2 2996 | stride: 2 2997 | } 2998 | } 2999 | layer { 3000 | bottom: "conv1_13" 3001 | top: "pool1_13_0" 3002 | name: "pool1_13_0" 3003 | type: "Pooling" 3004 | pooling_param { 3005 | pool: MAX 3006 | kernel_size: 2 3007 | stride: 2 3008 | } 3009 | } 3010 | layer { 3011 | bottom: "pool1_13_0" 3012 | top: "pool1_13_1" 3013 | name: "pool1_13_1" 3014 | type: "Pooling" 3015 | pooling_param { 3016 | pool: MAX 3017 | kernel_size: 2 3018 | stride: 2 3019 | } 3020 | } 3021 | layer { 3022 | bottom: "pool1_13_1" 3023 | top: "pool1_13_2" 3024 | name: "pool1_13_2" 3025 | type: "Pooling" 3026 | pooling_param { 3027 | pool: MAX 3028 | kernel_size: 2 3029 | stride: 2 3030 | } 3031 | } 3032 | layer { 3033 | 
bottom: "pool1_13_2" 3034 | top: "pool1_13_3" 3035 | name: "pool1_13_3" 3036 | type: "Pooling" 3037 | pooling_param { 3038 | pool: MAX 3039 | kernel_size: 2 3040 | stride: 2 3041 | } 3042 | } 3043 | layer { 3044 | bottom: "conv2_27" 3045 | top: "pool2_27_0" 3046 | name: "pool2_27_0" 3047 | type: "Pooling" 3048 | pooling_param { 3049 | pool: MAX 3050 | kernel_size: 2 3051 | stride: 2 3052 | } 3053 | } 3054 | layer { 3055 | bottom: "pool2_27_0" 3056 | top: "pool2_27_1" 3057 | name: "pool2_27_1" 3058 | type: "Pooling" 3059 | pooling_param { 3060 | pool: MAX 3061 | kernel_size: 2 3062 | stride: 2 3063 | } 3064 | } 3065 | layer { 3066 | bottom: "pool2_27_1" 3067 | top: "pool2_27_2" 3068 | name: "pool2_27_2" 3069 | type: "Pooling" 3070 | pooling_param { 3071 | pool: MAX 3072 | kernel_size: 2 3073 | stride: 2 3074 | } 3075 | } 3076 | layer { 3077 | bottom: "pool2_27_2" 3078 | top: "pool2_27_3" 3079 | name: "pool2_27_3" 3080 | type: "Pooling" 3081 | pooling_param { 3082 | pool: MAX 3083 | kernel_size: 2 3084 | stride: 2 3085 | } 3086 | } 3087 | layer { 3088 | name: "join_pool2_27_plus" 3089 | type: "FractalJoin" 3090 | bottom: "pool0_6_3" 3091 | bottom: "pool1_13_3" 3092 | bottom: "pool2_27_3" 3093 | top: "pool2_27_plus" 3094 | fractal_join_param { 3095 | drop_path_ratio: 0.15 3096 | } 3097 | } 3098 | layer { 3099 | name: "join_extra_mid_join2" 3100 | type: "FractalJoin" 3101 | bottom: "pool2_23_plus" 3102 | bottom: "pool2_19_plus" 3103 | # bottom: "pool2_27_plus" 3104 | top: "extra_mid_join2a" 3105 | fractal_join_param { 3106 | drop_path_ratio: 0.0 3107 | } 3108 | } 3109 | layer { 3110 | name: "freeze_drop_path" 3111 | type: "FreezeDropPath" 3112 | bottom: "pool2_27_plus" 3113 | bottom: "extra_mid_join2a" 3114 | top: "extra_mid_join2" 3115 | freeze_drop_path_param { 3116 | num_iter_per_cycle: 0 3117 | interval_type: 0 3118 | } 3119 | } 3120 | layer { 3121 | name: "batch_extra_mid_join2" 3122 | type: "BatchNorm" 3123 | bottom: "extra_mid_join2" 3124 | top: 
"extra_mid_join2" 3125 | param { lr_mult: 0 } 3126 | param { lr_mult: 0 } 3127 | param { lr_mult: 0 } 3128 | } 3129 | 3130 | # Reduction: 7, spatial size: 0 3131 | layer { 3132 | name: "prediction0" 3133 | type: "InnerProduct" 3134 | bottom: "extra_mid_join2" 3135 | top: "prediction0" 3136 | param { 3137 | lr_mult: 1 3138 | decay_mult: 1 3139 | name: "prediction0" 3140 | } 3141 | param { 3142 | lr_mult: 2 3143 | decay_mult: 0 3144 | name: "prediction0_b" 3145 | } 3146 | inner_product_param { 3147 | num_output: 10 3148 | weight_filler { 3149 | type: "xavier" 3150 | } 3151 | bias_filler { 3152 | type: "constant" 3153 | } 3154 | } 3155 | } 3156 | layer { 3157 | name: "loss0" 3158 | type: "SoftmaxWithLoss" 3159 | bottom: "prediction0" 3160 | bottom: "label" 3161 | top: "loss0" 3162 | loss_weight: 1.0 3163 | include: { phase: TRAIN } 3164 | } 3165 | 3166 | layer { 3167 | name: "accuracy_loss0" 3168 | type: "Accuracy" 3169 | bottom: "prediction0" 3170 | bottom: "label" 3171 | top: "accuracy_loss0" 3172 | include: { phase: TEST } 3173 | } 3174 | -------------------------------------------------------------------------------- /TSN-FDP.prototxt: -------------------------------------------------------------------------------- 1 | name: "FractalOfFractalNet" 2 | layer { 3 | name: "cifar" 4 | type: "Data" 5 | top: "data" 6 | top: "label" 7 | include { 8 | phase: TRAIN 9 | } 10 | transform_param { 11 | mirror: true 12 | mean_file: "examples/cifar10/mean.binaryproto" 13 | } 14 | data_param { 15 | source: "examples/cifar10/cifar10_train_lmdb" 16 | batch_size: 25 17 | backend: LMDB 18 | } 19 | } 20 | layer { 21 | name: "cifar" 22 | type: "Data" 23 | top: "data" 24 | top: "label" 25 | include { 26 | phase: TEST 27 | } 28 | transform_param { 29 | mean_file: "examples/cifar10/mean.binaryproto" 30 | mirror: false 31 | # shuffle: true 32 | } 33 | data_param { 34 | source: "examples/cifar10/cifar10_test_lmdb" 35 | batch_size: 10 36 | backend: LMDB 37 | } 38 | } 39 | # Input size: 32 40 
| layer { 41 | bottom: "data" 42 | top: "conv2_0" 43 | name: "conv2_0" 44 | param { 45 | lr_mult: 1 46 | decay_mult: 1 47 | name: "conv2_0" 48 | } 49 | param { 50 | lr_mult: 2 51 | decay_mult: 0 52 | name: "conv2_0_b" 53 | } 54 | type: "Convolution" 55 | convolution_param { 56 | num_output: 64 57 | pad: 1 58 | kernel_size: 3 59 | weight_filler { 60 | type: "xavier" 61 | } 62 | bias_filler { 63 | type: "constant" 64 | } 65 | } 66 | } 67 | layer { 68 | name: "dropout_conv2_0" 69 | type: "Dropout" 70 | bottom: "conv2_0" 71 | top: "conv2_0" 72 | dropout_param { 73 | dropout_ratio: 0.0 74 | } 75 | } 76 | layer { 77 | name: "batch_conv2_0" 78 | type: "BatchNorm" 79 | bottom: "conv2_0" 80 | top: "conv2_0" 81 | param { lr_mult: 0 } 82 | param { lr_mult: 0 } 83 | param { lr_mult: 0 } 84 | } 85 | layer { 86 | name: "relu_conv2_0" 87 | type: "ReLU" 88 | bottom: "conv2_0" 89 | top: "conv2_0" 90 | } 91 | layer { 92 | bottom: "conv2_0" 93 | top: "conv2_1" 94 | name: "conv2_1" 95 | param { 96 | lr_mult: 1 97 | decay_mult: 1 98 | name: "conv2_1" 99 | } 100 | param { 101 | lr_mult: 2 102 | decay_mult: 0 103 | name: "conv2_1_b" 104 | } 105 | type: "Convolution" 106 | convolution_param { 107 | num_output: 64 108 | pad: 1 109 | kernel_size: 3 110 | weight_filler { 111 | type: "xavier" 112 | } 113 | bias_filler { 114 | type: "constant" 115 | } 116 | } 117 | } 118 | layer { 119 | name: "dropout_conv2_1" 120 | type: "Dropout" 121 | bottom: "conv2_1" 122 | top: "conv2_1" 123 | dropout_param { 124 | dropout_ratio: 0.0 125 | } 126 | } 127 | layer { 128 | name: "batch_conv2_1" 129 | type: "BatchNorm" 130 | bottom: "conv2_1" 131 | top: "conv2_1" 132 | param { lr_mult: 0 } 133 | param { lr_mult: 0 } 134 | param { lr_mult: 0 } 135 | } 136 | layer { 137 | name: "relu_conv2_1" 138 | type: "ReLU" 139 | bottom: "conv2_1" 140 | top: "conv2_1" 141 | } 142 | layer { 143 | bottom: "data" 144 | top: "conv1_0" 145 | name: "conv1_0" 146 | param { 147 | lr_mult: 1 148 | decay_mult: 1 149 | name: "conv1_0" 
150 | } 151 | param { 152 | lr_mult: 2 153 | decay_mult: 0 154 | name: "conv1_0_b" 155 | } 156 | type: "Convolution" 157 | convolution_param { 158 | num_output: 64 159 | pad: 1 160 | kernel_size: 3 161 | weight_filler { 162 | type: "xavier" 163 | } 164 | bias_filler { 165 | type: "constant" 166 | } 167 | } 168 | } 169 | layer { 170 | name: "dropout_conv1_0" 171 | type: "Dropout" 172 | bottom: "conv1_0" 173 | top: "conv1_0" 174 | dropout_param { 175 | dropout_ratio: 0.0 176 | } 177 | } 178 | layer { 179 | name: "batch_conv1_0" 180 | type: "BatchNorm" 181 | bottom: "conv1_0" 182 | top: "conv1_0" 183 | param { lr_mult: 0 } 184 | param { lr_mult: 0 } 185 | param { lr_mult: 0 } 186 | } 187 | layer { 188 | name: "relu_conv1_0" 189 | type: "ReLU" 190 | bottom: "conv1_0" 191 | top: "conv1_0" 192 | } 193 | layer { 194 | name: "join_conv2_1_plus" 195 | type: "FractalJoin" 196 | bottom: "conv1_0" 197 | bottom: "conv2_1" 198 | top: "conv2_1_plus" 199 | fractal_join_param { 200 | drop_path_ratio: 0.15 201 | } 202 | } 203 | layer { 204 | bottom: "conv2_1_plus" 205 | top: "conv2_2" 206 | name: "conv2_2" 207 | param { 208 | lr_mult: 1 209 | decay_mult: 1 210 | name: "conv2_2" 211 | } 212 | param { 213 | lr_mult: 2 214 | decay_mult: 0 215 | name: "conv2_2_b" 216 | } 217 | type: "Convolution" 218 | convolution_param { 219 | num_output: 64 220 | pad: 1 221 | kernel_size: 3 222 | weight_filler { 223 | type: "xavier" 224 | } 225 | bias_filler { 226 | type: "constant" 227 | } 228 | } 229 | } 230 | layer { 231 | name: "dropout_conv2_2" 232 | type: "Dropout" 233 | bottom: "conv2_2" 234 | top: "conv2_2" 235 | dropout_param { 236 | dropout_ratio: 0.0 237 | } 238 | } 239 | layer { 240 | name: "batch_conv2_2" 241 | type: "BatchNorm" 242 | bottom: "conv2_2" 243 | top: "conv2_2" 244 | param { lr_mult: 0 } 245 | param { lr_mult: 0 } 246 | param { lr_mult: 0 } 247 | } 248 | layer { 249 | name: "relu_conv2_2" 250 | type: "ReLU" 251 | bottom: "conv2_2" 252 | top: "conv2_2" 253 | } 254 | layer { 255 
| bottom: "conv2_2" 256 | top: "conv2_3" 257 | name: "conv2_3" 258 | param { 259 | lr_mult: 1 260 | decay_mult: 1 261 | name: "conv2_3" 262 | } 263 | param { 264 | lr_mult: 2 265 | decay_mult: 0 266 | name: "conv2_3_b" 267 | } 268 | type: "Convolution" 269 | convolution_param { 270 | num_output: 64 271 | pad: 1 272 | kernel_size: 3 273 | weight_filler { 274 | type: "xavier" 275 | } 276 | bias_filler { 277 | type: "constant" 278 | } 279 | } 280 | } 281 | layer { 282 | name: "dropout_conv2_3" 283 | type: "Dropout" 284 | bottom: "conv2_3" 285 | top: "conv2_3" 286 | dropout_param { 287 | dropout_ratio: 0.0 288 | } 289 | } 290 | layer { 291 | name: "batch_conv2_3" 292 | type: "BatchNorm" 293 | bottom: "conv2_3" 294 | top: "conv2_3" 295 | param { lr_mult: 0 } 296 | param { lr_mult: 0 } 297 | param { lr_mult: 0 } 298 | } 299 | layer { 300 | name: "relu_conv2_3" 301 | type: "ReLU" 302 | bottom: "conv2_3" 303 | top: "conv2_3" 304 | } 305 | layer { 306 | bottom: "conv2_1_plus" 307 | top: "conv1_1" 308 | name: "conv1_1" 309 | param { 310 | lr_mult: 1 311 | decay_mult: 1 312 | name: "conv1_1" 313 | } 314 | param { 315 | lr_mult: 2 316 | decay_mult: 0 317 | name: "conv1_1_b" 318 | } 319 | type: "Convolution" 320 | convolution_param { 321 | num_output: 64 322 | pad: 1 323 | kernel_size: 3 324 | weight_filler { 325 | type: "xavier" 326 | } 327 | bias_filler { 328 | type: "constant" 329 | } 330 | } 331 | } 332 | layer { 333 | name: "dropout_conv1_1" 334 | type: "Dropout" 335 | bottom: "conv1_1" 336 | top: "conv1_1" 337 | dropout_param { 338 | dropout_ratio: 0.0 339 | } 340 | } 341 | layer { 342 | name: "batch_conv1_1" 343 | type: "BatchNorm" 344 | bottom: "conv1_1" 345 | top: "conv1_1" 346 | param { lr_mult: 0 } 347 | param { lr_mult: 0 } 348 | param { lr_mult: 0 } 349 | } 350 | layer { 351 | name: "relu_conv1_1" 352 | type: "ReLU" 353 | bottom: "conv1_1" 354 | top: "conv1_1" 355 | } 356 | layer { 357 | bottom: "data" 358 | top: "conv0_0" 359 | name: "conv0_0" 360 | param { 361 | 
lr_mult: 1 362 | decay_mult: 1 363 | name: "conv0_0" 364 | } 365 | param { 366 | lr_mult: 2 367 | decay_mult: 0 368 | name: "conv0_0_b" 369 | } 370 | type: "Convolution" 371 | convolution_param { 372 | num_output: 64 373 | pad: 1 374 | kernel_size: 3 375 | weight_filler { 376 | type: "xavier" 377 | } 378 | bias_filler { 379 | type: "constant" 380 | } 381 | } 382 | } 383 | layer { 384 | name: "dropout_conv0_0" 385 | type: "Dropout" 386 | bottom: "conv0_0" 387 | top: "conv0_0" 388 | dropout_param { 389 | dropout_ratio: 0.0 390 | } 391 | } 392 | layer { 393 | name: "batch_conv0_0" 394 | type: "BatchNorm" 395 | bottom: "conv0_0" 396 | top: "conv0_0" 397 | param { lr_mult: 0 } 398 | param { lr_mult: 0 } 399 | param { lr_mult: 0 } 400 | } 401 | layer { 402 | name: "relu_conv0_0" 403 | type: "ReLU" 404 | bottom: "conv0_0" 405 | top: "conv0_0" 406 | } 407 | layer { 408 | bottom: "conv0_0" 409 | top: "pool0_0" 410 | name: "pool0_0" 411 | type: "Pooling" 412 | pooling_param { 413 | pool: MAX 414 | kernel_size: 2 415 | stride: 2 416 | } 417 | } 418 | layer { 419 | bottom: "conv1_1" 420 | top: "pool1_1" 421 | name: "pool1_1" 422 | type: "Pooling" 423 | pooling_param { 424 | pool: MAX 425 | kernel_size: 2 426 | stride: 2 427 | } 428 | } 429 | layer { 430 | bottom: "conv2_3" 431 | top: "pool2_3" 432 | name: "pool2_3" 433 | type: "Pooling" 434 | pooling_param { 435 | pool: MAX 436 | kernel_size: 2 437 | stride: 2 438 | } 439 | } 440 | layer { 441 | name: "join_pool2_3_plus" 442 | type: "FractalJoin" 443 | bottom: "pool0_0" 444 | bottom: "pool1_1" 445 | bottom: "pool2_3" 446 | top: "pool2_3_plus" 447 | fractal_join_param { 448 | drop_path_ratio: 0.15 449 | } 450 | } 451 | # Reduction: 1, spatial size: 16 452 | layer { 453 | bottom: "pool2_3_plus" 454 | top: "conv2_4" 455 | name: "conv2_4" 456 | param { 457 | lr_mult: 1 458 | decay_mult: 1 459 | name: "conv2_4" 460 | } 461 | param { 462 | lr_mult: 2 463 | decay_mult: 0 464 | name: "conv2_4_b" 465 | } 466 | type: "Convolution" 467 | 
convolution_param { 468 | num_output: 128 469 | pad: 1 470 | kernel_size: 3 471 | weight_filler { 472 | type: "xavier" 473 | } 474 | bias_filler { 475 | type: "constant" 476 | } 477 | } 478 | } 479 | layer { 480 | name: "dropout_conv2_4" 481 | type: "Dropout" 482 | bottom: "conv2_4" 483 | top: "conv2_4" 484 | dropout_param { 485 | dropout_ratio: 0.1 486 | } 487 | } 488 | layer { 489 | name: "batch_conv2_4" 490 | type: "BatchNorm" 491 | bottom: "conv2_4" 492 | top: "conv2_4" 493 | param { lr_mult: 0 } 494 | param { lr_mult: 0 } 495 | param { lr_mult: 0 } 496 | } 497 | layer { 498 | name: "relu_conv2_4" 499 | type: "ReLU" 500 | bottom: "conv2_4" 501 | top: "conv2_4" 502 | } 503 | layer { 504 | bottom: "conv2_4" 505 | top: "conv2_5" 506 | name: "conv2_5" 507 | param { 508 | lr_mult: 1 509 | decay_mult: 1 510 | name: "conv2_5" 511 | } 512 | param { 513 | lr_mult: 2 514 | decay_mult: 0 515 | name: "conv2_5_b" 516 | } 517 | type: "Convolution" 518 | convolution_param { 519 | num_output: 128 520 | pad: 1 521 | kernel_size: 3 522 | weight_filler { 523 | type: "xavier" 524 | } 525 | bias_filler { 526 | type: "constant" 527 | } 528 | } 529 | } 530 | layer { 531 | name: "dropout_conv2_5" 532 | type: "Dropout" 533 | bottom: "conv2_5" 534 | top: "conv2_5" 535 | dropout_param { 536 | dropout_ratio: 0.1 537 | } 538 | } 539 | layer { 540 | name: "batch_conv2_5" 541 | type: "BatchNorm" 542 | bottom: "conv2_5" 543 | top: "conv2_5" 544 | param { lr_mult: 0 } 545 | param { lr_mult: 0 } 546 | param { lr_mult: 0 } 547 | } 548 | layer { 549 | name: "relu_conv2_5" 550 | type: "ReLU" 551 | bottom: "conv2_5" 552 | top: "conv2_5" 553 | } 554 | layer { 555 | bottom: "pool2_3_plus" 556 | top: "conv1_2" 557 | name: "conv1_2" 558 | param { 559 | lr_mult: 1 560 | decay_mult: 1 561 | name: "conv1_2" 562 | } 563 | param { 564 | lr_mult: 2 565 | decay_mult: 0 566 | name: "conv1_2_b" 567 | } 568 | type: "Convolution" 569 | convolution_param { 570 | num_output: 128 571 | pad: 1 572 | kernel_size: 3 
573 | weight_filler { 574 | type: "xavier" 575 | } 576 | bias_filler { 577 | type: "constant" 578 | } 579 | } 580 | } 581 | layer { 582 | name: "dropout_conv1_2" 583 | type: "Dropout" 584 | bottom: "conv1_2" 585 | top: "conv1_2" 586 | dropout_param { 587 | dropout_ratio: 0.1 588 | } 589 | } 590 | layer { 591 | name: "batch_conv1_2" 592 | type: "BatchNorm" 593 | bottom: "conv1_2" 594 | top: "conv1_2" 595 | param { lr_mult: 0 } 596 | param { lr_mult: 0 } 597 | param { lr_mult: 0 } 598 | } 599 | layer { 600 | name: "relu_conv1_2" 601 | type: "ReLU" 602 | bottom: "conv1_2" 603 | top: "conv1_2" 604 | } 605 | layer { 606 | name: "join_conv2_5_plus" 607 | type: "FractalJoin" 608 | bottom: "conv1_2" 609 | bottom: "conv2_5" 610 | top: "conv2_5_plus" 611 | fractal_join_param { 612 | drop_path_ratio: 0.15 613 | } 614 | } 615 | layer { 616 | bottom: "conv2_5_plus" 617 | top: "conv2_6" 618 | name: "conv2_6" 619 | param { 620 | lr_mult: 1 621 | decay_mult: 1 622 | name: "conv2_6" 623 | } 624 | param { 625 | lr_mult: 2 626 | decay_mult: 0 627 | name: "conv2_6_b" 628 | } 629 | type: "Convolution" 630 | convolution_param { 631 | num_output: 128 632 | pad: 1 633 | kernel_size: 3 634 | weight_filler { 635 | type: "xavier" 636 | } 637 | bias_filler { 638 | type: "constant" 639 | } 640 | } 641 | } 642 | layer { 643 | name: "dropout_conv2_6" 644 | type: "Dropout" 645 | bottom: "conv2_6" 646 | top: "conv2_6" 647 | dropout_param { 648 | dropout_ratio: 0.1 649 | } 650 | } 651 | layer { 652 | name: "batch_conv2_6" 653 | type: "BatchNorm" 654 | bottom: "conv2_6" 655 | top: "conv2_6" 656 | param { lr_mult: 0 } 657 | param { lr_mult: 0 } 658 | param { lr_mult: 0 } 659 | } 660 | layer { 661 | name: "relu_conv2_6" 662 | type: "ReLU" 663 | bottom: "conv2_6" 664 | top: "conv2_6" 665 | } 666 | layer { 667 | bottom: "conv2_6" 668 | top: "conv2_7" 669 | name: "conv2_7" 670 | param { 671 | lr_mult: 1 672 | decay_mult: 1 673 | name: "conv2_7" 674 | } 675 | param { 676 | lr_mult: 2 677 | decay_mult: 0 
678 | name: "conv2_7_b" 679 | } 680 | type: "Convolution" 681 | convolution_param { 682 | num_output: 128 683 | pad: 1 684 | kernel_size: 3 685 | weight_filler { 686 | type: "xavier" 687 | } 688 | bias_filler { 689 | type: "constant" 690 | } 691 | } 692 | } 693 | layer { 694 | name: "dropout_conv2_7" 695 | type: "Dropout" 696 | bottom: "conv2_7" 697 | top: "conv2_7" 698 | dropout_param { 699 | dropout_ratio: 0.1 700 | } 701 | } 702 | layer { 703 | name: "batch_conv2_7" 704 | type: "BatchNorm" 705 | bottom: "conv2_7" 706 | top: "conv2_7" 707 | param { lr_mult: 0 } 708 | param { lr_mult: 0 } 709 | param { lr_mult: 0 } 710 | } 711 | layer { 712 | name: "relu_conv2_7" 713 | type: "ReLU" 714 | bottom: "conv2_7" 715 | top: "conv2_7" 716 | } 717 | layer { 718 | bottom: "conv2_5_plus" 719 | top: "conv1_3" 720 | name: "conv1_3" 721 | param { 722 | lr_mult: 1 723 | decay_mult: 1 724 | name: "conv1_3" 725 | } 726 | param { 727 | lr_mult: 2 728 | decay_mult: 0 729 | name: "conv1_3_b" 730 | } 731 | type: "Convolution" 732 | convolution_param { 733 | num_output: 128 734 | pad: 1 735 | kernel_size: 3 736 | weight_filler { 737 | type: "xavier" 738 | } 739 | bias_filler { 740 | type: "constant" 741 | } 742 | } 743 | } 744 | layer { 745 | name: "dropout_conv1_3" 746 | type: "Dropout" 747 | bottom: "conv1_3" 748 | top: "conv1_3" 749 | dropout_param { 750 | dropout_ratio: 0.1 751 | } 752 | } 753 | layer { 754 | name: "batch_conv1_3" 755 | type: "BatchNorm" 756 | bottom: "conv1_3" 757 | top: "conv1_3" 758 | param { lr_mult: 0 } 759 | param { lr_mult: 0 } 760 | param { lr_mult: 0 } 761 | } 762 | layer { 763 | name: "relu_conv1_3" 764 | type: "ReLU" 765 | bottom: "conv1_3" 766 | top: "conv1_3" 767 | } 768 | layer { 769 | bottom: "pool2_3_plus" 770 | top: "conv0_1" 771 | name: "conv0_1" 772 | param { 773 | lr_mult: 1 774 | decay_mult: 1 775 | name: "conv0_1" 776 | } 777 | param { 778 | lr_mult: 2 779 | decay_mult: 0 780 | name: "conv0_1_b" 781 | } 782 | type: "Convolution" 783 | 
convolution_param { 784 | num_output: 128 785 | pad: 1 786 | kernel_size: 3 787 | weight_filler { 788 | type: "xavier" 789 | } 790 | bias_filler { 791 | type: "constant" 792 | } 793 | } 794 | } 795 | layer { 796 | name: "dropout_conv0_1" 797 | type: "Dropout" 798 | bottom: "conv0_1" 799 | top: "conv0_1" 800 | dropout_param { 801 | dropout_ratio: 0.1 802 | } 803 | } 804 | layer { 805 | name: "batch_conv0_1" 806 | type: "BatchNorm" 807 | bottom: "conv0_1" 808 | top: "conv0_1" 809 | param { lr_mult: 0 } 810 | param { lr_mult: 0 } 811 | param { lr_mult: 0 } 812 | } 813 | layer { 814 | name: "relu_conv0_1" 815 | type: "ReLU" 816 | bottom: "conv0_1" 817 | top: "conv0_1" 818 | } 819 | layer { 820 | bottom: "conv0_1" 821 | top: "pool0_1" 822 | name: "pool0_1" 823 | type: "Pooling" 824 | pooling_param { 825 | pool: MAX 826 | kernel_size: 2 827 | stride: 2 828 | } 829 | } 830 | layer { 831 | bottom: "conv1_3" 832 | top: "pool1_3" 833 | name: "pool1_3" 834 | type: "Pooling" 835 | pooling_param { 836 | pool: MAX 837 | kernel_size: 2 838 | stride: 2 839 | } 840 | } 841 | layer { 842 | bottom: "conv2_7" 843 | top: "pool2_7" 844 | name: "pool2_7" 845 | type: "Pooling" 846 | pooling_param { 847 | pool: MAX 848 | kernel_size: 2 849 | stride: 2 850 | } 851 | } 852 | layer { 853 | name: "join_pool2_7_plus" 854 | type: "FractalJoin" 855 | bottom: "pool0_1" 856 | bottom: "pool1_3" 857 | bottom: "pool2_7" 858 | top: "pool2_7_plus" 859 | fractal_join_param { 860 | drop_path_ratio: 0.15 861 | } 862 | } 863 | # Reduction: 2, spatial size: 8 864 | layer { 865 | bottom: "data" 866 | top: "conv2_8" 867 | name: "conv2_8" 868 | param { 869 | lr_mult: 1 870 | decay_mult: 1 871 | name: "conv2_8" 872 | } 873 | param { 874 | lr_mult: 2 875 | decay_mult: 0 876 | name: "conv2_8_b" 877 | } 878 | type: "Convolution" 879 | convolution_param { 880 | num_output: 128 881 | pad: 1 882 | kernel_size: 3 883 | weight_filler { 884 | type: "xavier" 885 | } 886 | bias_filler { 887 | type: "constant" 888 | } 889 | 
} 890 | } 891 | layer { 892 | name: "dropout_conv2_8" 893 | type: "Dropout" 894 | bottom: "conv2_8" 895 | top: "conv2_8" 896 | dropout_param { 897 | dropout_ratio: 0.0 898 | } 899 | } 900 | layer { 901 | name: "batch_conv2_8" 902 | type: "BatchNorm" 903 | bottom: "conv2_8" 904 | top: "conv2_8" 905 | param { lr_mult: 0 } 906 | param { lr_mult: 0 } 907 | param { lr_mult: 0 } 908 | } 909 | layer { 910 | name: "relu_conv2_8" 911 | type: "ReLU" 912 | bottom: "conv2_8" 913 | top: "conv2_8" 914 | } 915 | layer { 916 | bottom: "conv2_8" 917 | top: "conv2_9" 918 | name: "conv2_9" 919 | param { 920 | lr_mult: 1 921 | decay_mult: 1 922 | name: "conv2_9" 923 | } 924 | param { 925 | lr_mult: 2 926 | decay_mult: 0 927 | name: "conv2_9_b" 928 | } 929 | type: "Convolution" 930 | convolution_param { 931 | num_output: 128 932 | pad: 1 933 | kernel_size: 3 934 | weight_filler { 935 | type: "xavier" 936 | } 937 | bias_filler { 938 | type: "constant" 939 | } 940 | } 941 | } 942 | layer { 943 | name: "dropout_conv2_9" 944 | type: "Dropout" 945 | bottom: "conv2_9" 946 | top: "conv2_9" 947 | dropout_param { 948 | dropout_ratio: 0.0 949 | } 950 | } 951 | layer { 952 | name: "batch_conv2_9" 953 | type: "BatchNorm" 954 | bottom: "conv2_9" 955 | top: "conv2_9" 956 | param { lr_mult: 0 } 957 | param { lr_mult: 0 } 958 | param { lr_mult: 0 } 959 | } 960 | layer { 961 | name: "relu_conv2_9" 962 | type: "ReLU" 963 | bottom: "conv2_9" 964 | top: "conv2_9" 965 | } 966 | layer { 967 | bottom: "data" 968 | top: "conv1_4" 969 | name: "conv1_4" 970 | param { 971 | lr_mult: 1 972 | decay_mult: 1 973 | name: "conv1_4" 974 | } 975 | param { 976 | lr_mult: 2 977 | decay_mult: 0 978 | name: "conv1_4_b" 979 | } 980 | type: "Convolution" 981 | convolution_param { 982 | num_output: 128 983 | pad: 1 984 | kernel_size: 3 985 | weight_filler { 986 | type: "xavier" 987 | } 988 | bias_filler { 989 | type: "constant" 990 | } 991 | } 992 | } 993 | layer { 994 | name: "dropout_conv1_4" 995 | type: "Dropout" 996 | 
bottom: "conv1_4" 997 | top: "conv1_4" 998 | dropout_param { 999 | dropout_ratio: 0.0 1000 | } 1001 | } 1002 | layer { 1003 | name: "batch_conv1_4" 1004 | type: "BatchNorm" 1005 | bottom: "conv1_4" 1006 | top: "conv1_4" 1007 | param { lr_mult: 0 } 1008 | param { lr_mult: 0 } 1009 | param { lr_mult: 0 } 1010 | } 1011 | layer { 1012 | name: "relu_conv1_4" 1013 | type: "ReLU" 1014 | bottom: "conv1_4" 1015 | top: "conv1_4" 1016 | } 1017 | layer { 1018 | name: "join_conv2_9_plus" 1019 | type: "FractalJoin" 1020 | bottom: "conv1_4" 1021 | bottom: "conv2_9" 1022 | top: "conv2_9_plus" 1023 | fractal_join_param { 1024 | drop_path_ratio: 0.15 1025 | } 1026 | } 1027 | layer { 1028 | bottom: "conv2_9_plus" 1029 | top: "conv2_10" 1030 | name: "conv2_10" 1031 | param { 1032 | lr_mult: 1 1033 | decay_mult: 1 1034 | name: "conv2_10" 1035 | } 1036 | param { 1037 | lr_mult: 2 1038 | decay_mult: 0 1039 | name: "conv2_10_b" 1040 | } 1041 | type: "Convolution" 1042 | convolution_param { 1043 | num_output: 128 1044 | pad: 1 1045 | kernel_size: 3 1046 | weight_filler { 1047 | type: "xavier" 1048 | } 1049 | bias_filler { 1050 | type: "constant" 1051 | } 1052 | } 1053 | } 1054 | layer { 1055 | name: "dropout_conv2_10" 1056 | type: "Dropout" 1057 | bottom: "conv2_10" 1058 | top: "conv2_10" 1059 | dropout_param { 1060 | dropout_ratio: 0.0 1061 | } 1062 | } 1063 | layer { 1064 | name: "batch_conv2_10" 1065 | type: "BatchNorm" 1066 | bottom: "conv2_10" 1067 | top: "conv2_10" 1068 | param { lr_mult: 0 } 1069 | param { lr_mult: 0 } 1070 | param { lr_mult: 0 } 1071 | } 1072 | layer { 1073 | name: "relu_conv2_10" 1074 | type: "ReLU" 1075 | bottom: "conv2_10" 1076 | top: "conv2_10" 1077 | } 1078 | layer { 1079 | bottom: "conv2_10" 1080 | top: "conv2_11" 1081 | name: "conv2_11" 1082 | param { 1083 | lr_mult: 1 1084 | decay_mult: 1 1085 | name: "conv2_11" 1086 | } 1087 | param { 1088 | lr_mult: 2 1089 | decay_mult: 0 1090 | name: "conv2_11_b" 1091 | } 1092 | type: "Convolution" 1093 | 
convolution_param { 1094 | num_output: 128 1095 | pad: 1 1096 | kernel_size: 3 1097 | weight_filler { 1098 | type: "xavier" 1099 | } 1100 | bias_filler { 1101 | type: "constant" 1102 | } 1103 | } 1104 | } 1105 | layer { 1106 | name: "dropout_conv2_11" 1107 | type: "Dropout" 1108 | bottom: "conv2_11" 1109 | top: "conv2_11" 1110 | dropout_param { 1111 | dropout_ratio: 0.0 1112 | } 1113 | } 1114 | layer { 1115 | name: "batch_conv2_11" 1116 | type: "BatchNorm" 1117 | bottom: "conv2_11" 1118 | top: "conv2_11" 1119 | param { lr_mult: 0 } 1120 | param { lr_mult: 0 } 1121 | param { lr_mult: 0 } 1122 | } 1123 | layer { 1124 | name: "relu_conv2_11" 1125 | type: "ReLU" 1126 | bottom: "conv2_11" 1127 | top: "conv2_11" 1128 | } 1129 | layer { 1130 | bottom: "conv2_9_plus" 1131 | top: "conv1_5" 1132 | name: "conv1_5" 1133 | param { 1134 | lr_mult: 1 1135 | decay_mult: 1 1136 | name: "conv1_5" 1137 | } 1138 | param { 1139 | lr_mult: 2 1140 | decay_mult: 0 1141 | name: "conv1_5_b" 1142 | } 1143 | type: "Convolution" 1144 | convolution_param { 1145 | num_output: 128 1146 | pad: 1 1147 | kernel_size: 3 1148 | weight_filler { 1149 | type: "xavier" 1150 | } 1151 | bias_filler { 1152 | type: "constant" 1153 | } 1154 | } 1155 | } 1156 | layer { 1157 | name: "dropout_conv1_5" 1158 | type: "Dropout" 1159 | bottom: "conv1_5" 1160 | top: "conv1_5" 1161 | dropout_param { 1162 | dropout_ratio: 0.0 1163 | } 1164 | } 1165 | layer { 1166 | name: "batch_conv1_5" 1167 | type: "BatchNorm" 1168 | bottom: "conv1_5" 1169 | top: "conv1_5" 1170 | param { lr_mult: 0 } 1171 | param { lr_mult: 0 } 1172 | param { lr_mult: 0 } 1173 | } 1174 | layer { 1175 | name: "relu_conv1_5" 1176 | type: "ReLU" 1177 | bottom: "conv1_5" 1178 | top: "conv1_5" 1179 | } 1180 | layer { 1181 | bottom: "data" 1182 | top: "conv0_2" 1183 | name: "conv0_2" 1184 | param { 1185 | lr_mult: 1 1186 | decay_mult: 1 1187 | name: "conv0_2" 1188 | } 1189 | param { 1190 | lr_mult: 2 1191 | decay_mult: 0 1192 | name: "conv0_2_b" 1193 | } 1194 
| type: "Convolution" 1195 | convolution_param { 1196 | num_output: 128 1197 | pad: 1 1198 | kernel_size: 3 1199 | weight_filler { 1200 | type: "xavier" 1201 | } 1202 | bias_filler { 1203 | type: "constant" 1204 | } 1205 | } 1206 | } 1207 | layer { 1208 | name: "dropout_conv0_2" 1209 | type: "Dropout" 1210 | bottom: "conv0_2" 1211 | top: "conv0_2" 1212 | dropout_param { 1213 | dropout_ratio: 0.0 1214 | } 1215 | } 1216 | layer { 1217 | name: "batch_conv0_2" 1218 | type: "BatchNorm" 1219 | bottom: "conv0_2" 1220 | top: "conv0_2" 1221 | param { lr_mult: 0 } 1222 | param { lr_mult: 0 } 1223 | param { lr_mult: 0 } 1224 | } 1225 | layer { 1226 | name: "relu_conv0_2" 1227 | type: "ReLU" 1228 | bottom: "conv0_2" 1229 | top: "conv0_2" 1230 | } 1231 | layer { 1232 | bottom: "conv0_2" 1233 | top: "pool0_2_0" 1234 | name: "pool0_2_0" 1235 | type: "Pooling" 1236 | pooling_param { 1237 | pool: MAX 1238 | kernel_size: 2 1239 | stride: 2 1240 | } 1241 | } 1242 | layer { 1243 | bottom: "pool0_2_0" 1244 | top: "pool0_2_1" 1245 | name: "pool0_2_1" 1246 | type: "Pooling" 1247 | pooling_param { 1248 | pool: MAX 1249 | kernel_size: 2 1250 | stride: 2 1251 | } 1252 | } 1253 | layer { 1254 | bottom: "conv1_5" 1255 | top: "pool1_5_0" 1256 | name: "pool1_5_0" 1257 | type: "Pooling" 1258 | pooling_param { 1259 | pool: MAX 1260 | kernel_size: 2 1261 | stride: 2 1262 | } 1263 | } 1264 | layer { 1265 | bottom: "pool1_5_0" 1266 | top: "pool1_5_1" 1267 | name: "pool1_5_1" 1268 | type: "Pooling" 1269 | pooling_param { 1270 | pool: MAX 1271 | kernel_size: 2 1272 | stride: 2 1273 | } 1274 | } 1275 | layer { 1276 | bottom: "conv2_11" 1277 | top: "pool2_11_0" 1278 | name: "pool2_11_0" 1279 | type: "Pooling" 1280 | pooling_param { 1281 | pool: MAX 1282 | kernel_size: 2 1283 | stride: 2 1284 | } 1285 | } 1286 | layer { 1287 | bottom: "pool2_11_0" 1288 | top: "pool2_11_1" 1289 | name: "pool2_11_1" 1290 | type: "Pooling" 1291 | pooling_param { 1292 | pool: MAX 1293 | kernel_size: 2 1294 | stride: 2 1295 | 
} 1296 | } 1297 | layer { 1298 | name: "join_pool2_11_plus" 1299 | type: "FractalJoin" 1300 | bottom: "pool0_2_1" 1301 | bottom: "pool1_5_1" 1302 | bottom: "pool2_11_1" 1303 | top: "pool2_11_plus" 1304 | fractal_join_param { 1305 | drop_path_ratio: 0.15 1306 | } 1307 | } 1308 | layer { 1309 | name: "join_extra_mid_join" 1310 | type: "FractalJoin" 1311 | bottom: "pool2_7_plus" 1312 | bottom: "pool2_11_plus" 1313 | top: "extra_mid_join" 1314 | fractal_join_param { 1315 | drop_path_ratio: 0.15 1316 | } 1317 | } 1318 | # Columns joined (no pooling here): still reduction 2, spatial size: 8 1319 | layer { 1320 | bottom: "extra_mid_join" 1321 | top: "conv2_12" 1322 | name: "conv2_12" 1323 | param { 1324 | lr_mult: 1 1325 | decay_mult: 1 1326 | name: "conv2_12" 1327 | } 1328 | param { 1329 | lr_mult: 2 1330 | decay_mult: 0 1331 | name: "conv2_12_b" 1332 | } 1333 | type: "Convolution" 1334 | convolution_param { 1335 | num_output: 256 1336 | pad: 1 1337 | kernel_size: 3 1338 | weight_filler { 1339 | type: "xavier" 1340 | } 1341 | bias_filler { 1342 | type: "constant" 1343 | } 1344 | } 1345 | } 1346 | layer { 1347 | name: "dropout_conv2_12" 1348 | type: "Dropout" 1349 | bottom: "conv2_12" 1350 | top: "conv2_12" 1351 | dropout_param { 1352 | dropout_ratio: 0.2 1353 | } 1354 | } 1355 | layer { 1356 | name: "batch_conv2_12" 1357 | type: "BatchNorm" 1358 | bottom: "conv2_12" 1359 | top: "conv2_12" 1360 | param { lr_mult: 0 } 1361 | param { lr_mult: 0 } 1362 | param { lr_mult: 0 } 1363 | } 1364 | layer { 1365 | name: "relu_conv2_12" 1366 | type: "ReLU" 1367 | bottom: "conv2_12" 1368 | top: "conv2_12" 1369 | } 1370 | layer { 1371 | bottom: "conv2_12" 1372 | top: "conv2_13" 1373 | name: "conv2_13" 1374 | param { 1375 | lr_mult: 1 1376 | decay_mult: 1 1377 | name: "conv2_13" 1378 | } 1379 | param { 1380 | lr_mult: 2 1381 | decay_mult: 0 1382 | name: "conv2_13_b" 1383 | } 1384 | type: "Convolution" 1385 | convolution_param { 1386 | num_output: 256 1387 | pad: 1 1388 | kernel_size: 3 1389 | weight_filler { 1390 | type: 
"xavier" 1391 | } 1392 | bias_filler { 1393 | type: "constant" 1394 | } 1395 | } 1396 | } 1397 | layer { 1398 | name: "dropout_conv2_13" 1399 | type: "Dropout" 1400 | bottom: "conv2_13" 1401 | top: "conv2_13" 1402 | dropout_param { 1403 | dropout_ratio: 0.2 1404 | } 1405 | } 1406 | layer { 1407 | name: "batch_conv2_13" 1408 | type: "BatchNorm" 1409 | bottom: "conv2_13" 1410 | top: "conv2_13" 1411 | param { lr_mult: 0 } 1412 | param { lr_mult: 0 } 1413 | param { lr_mult: 0 } 1414 | } 1415 | layer { 1416 | name: "relu_conv2_13" 1417 | type: "ReLU" 1418 | bottom: "conv2_13" 1419 | top: "conv2_13" 1420 | } 1421 | layer { 1422 | bottom: "extra_mid_join" 1423 | top: "conv1_6" 1424 | name: "conv1_6" 1425 | param { 1426 | lr_mult: 1 1427 | decay_mult: 1 1428 | name: "conv1_6" 1429 | } 1430 | param { 1431 | lr_mult: 2 1432 | decay_mult: 0 1433 | name: "conv1_6_b" 1434 | } 1435 | type: "Convolution" 1436 | convolution_param { 1437 | num_output: 256 1438 | pad: 1 1439 | kernel_size: 3 1440 | weight_filler { 1441 | type: "xavier" 1442 | } 1443 | bias_filler { 1444 | type: "constant" 1445 | } 1446 | } 1447 | } 1448 | layer { 1449 | name: "dropout_conv1_6" 1450 | type: "Dropout" 1451 | bottom: "conv1_6" 1452 | top: "conv1_6" 1453 | dropout_param { 1454 | dropout_ratio: 0.2 1455 | } 1456 | } 1457 | layer { 1458 | name: "batch_conv1_6" 1459 | type: "BatchNorm" 1460 | bottom: "conv1_6" 1461 | top: "conv1_6" 1462 | param { lr_mult: 0 } 1463 | param { lr_mult: 0 } 1464 | param { lr_mult: 0 } 1465 | } 1466 | layer { 1467 | name: "relu_conv1_6" 1468 | type: "ReLU" 1469 | bottom: "conv1_6" 1470 | top: "conv1_6" 1471 | } 1472 | layer { 1473 | name: "join_conv2_13_plus" 1474 | type: "FractalJoin" 1475 | bottom: "conv1_6" 1476 | bottom: "conv2_13" 1477 | top: "conv2_13_plus" 1478 | fractal_join_param { 1479 | drop_path_ratio: 0.15 1480 | } 1481 | } 1482 | layer { 1483 | bottom: "conv2_13_plus" 1484 | top: "conv2_14" 1485 | name: "conv2_14" 1486 | param { 1487 | lr_mult: 1 1488 | 
decay_mult: 1 1489 | name: "conv2_14" 1490 | } 1491 | param { 1492 | lr_mult: 2 1493 | decay_mult: 0 1494 | name: "conv2_14_b" 1495 | } 1496 | type: "Convolution" 1497 | convolution_param { 1498 | num_output: 256 1499 | pad: 1 1500 | kernel_size: 3 1501 | weight_filler { 1502 | type: "xavier" 1503 | } 1504 | bias_filler { 1505 | type: "constant" 1506 | } 1507 | } 1508 | } 1509 | layer { 1510 | name: "dropout_conv2_14" 1511 | type: "Dropout" 1512 | bottom: "conv2_14" 1513 | top: "conv2_14" 1514 | dropout_param { 1515 | dropout_ratio: 0.2 1516 | } 1517 | } 1518 | layer { 1519 | name: "batch_conv2_14" 1520 | type: "BatchNorm" 1521 | bottom: "conv2_14" 1522 | top: "conv2_14" 1523 | param { lr_mult: 0 } 1524 | param { lr_mult: 0 } 1525 | param { lr_mult: 0 } 1526 | } 1527 | layer { 1528 | name: "relu_conv2_14" 1529 | type: "ReLU" 1530 | bottom: "conv2_14" 1531 | top: "conv2_14" 1532 | } 1533 | layer { 1534 | bottom: "conv2_14" 1535 | top: "conv2_15" 1536 | name: "conv2_15" 1537 | param { 1538 | lr_mult: 1 1539 | decay_mult: 1 1540 | name: "conv2_15" 1541 | } 1542 | param { 1543 | lr_mult: 2 1544 | decay_mult: 0 1545 | name: "conv2_15_b" 1546 | } 1547 | type: "Convolution" 1548 | convolution_param { 1549 | num_output: 256 1550 | pad: 1 1551 | kernel_size: 3 1552 | weight_filler { 1553 | type: "xavier" 1554 | } 1555 | bias_filler { 1556 | type: "constant" 1557 | } 1558 | } 1559 | } 1560 | layer { 1561 | name: "dropout_conv2_15" 1562 | type: "Dropout" 1563 | bottom: "conv2_15" 1564 | top: "conv2_15" 1565 | dropout_param { 1566 | dropout_ratio: 0.2 1567 | } 1568 | } 1569 | layer { 1570 | name: "batch_conv2_15" 1571 | type: "BatchNorm" 1572 | bottom: "conv2_15" 1573 | top: "conv2_15" 1574 | param { lr_mult: 0 } 1575 | param { lr_mult: 0 } 1576 | param { lr_mult: 0 } 1577 | } 1578 | layer { 1579 | name: "relu_conv2_15" 1580 | type: "ReLU" 1581 | bottom: "conv2_15" 1582 | top: "conv2_15" 1583 | } 1584 | layer { 1585 | bottom: "conv2_13_plus" 1586 | top: "conv1_7" 1587 | name: 
"conv1_7" 1588 | param { 1589 | lr_mult: 1 1590 | decay_mult: 1 1591 | name: "conv1_7" 1592 | } 1593 | param { 1594 | lr_mult: 2 1595 | decay_mult: 0 1596 | name: "conv1_7_b" 1597 | } 1598 | type: "Convolution" 1599 | convolution_param { 1600 | num_output: 256 1601 | pad: 1 1602 | kernel_size: 3 1603 | weight_filler { 1604 | type: "xavier" 1605 | } 1606 | bias_filler { 1607 | type: "constant" 1608 | } 1609 | } 1610 | } 1611 | layer { 1612 | name: "dropout_conv1_7" 1613 | type: "Dropout" 1614 | bottom: "conv1_7" 1615 | top: "conv1_7" 1616 | dropout_param { 1617 | dropout_ratio: 0.2 1618 | } 1619 | } 1620 | layer { 1621 | name: "batch_conv1_7" 1622 | type: "BatchNorm" 1623 | bottom: "conv1_7" 1624 | top: "conv1_7" 1625 | param { lr_mult: 0 } 1626 | param { lr_mult: 0 } 1627 | param { lr_mult: 0 } 1628 | } 1629 | layer { 1630 | name: "relu_conv1_7" 1631 | type: "ReLU" 1632 | bottom: "conv1_7" 1633 | top: "conv1_7" 1634 | } 1635 | layer { 1636 | bottom: "extra_mid_join" 1637 | top: "conv0_3" 1638 | name: "conv0_3" 1639 | param { 1640 | lr_mult: 1 1641 | decay_mult: 1 1642 | name: "conv0_3" 1643 | } 1644 | param { 1645 | lr_mult: 2 1646 | decay_mult: 0 1647 | name: "conv0_3_b" 1648 | } 1649 | type: "Convolution" 1650 | convolution_param { 1651 | num_output: 256 1652 | pad: 1 1653 | kernel_size: 3 1654 | weight_filler { 1655 | type: "xavier" 1656 | } 1657 | bias_filler { 1658 | type: "constant" 1659 | } 1660 | } 1661 | } 1662 | layer { 1663 | name: "dropout_conv0_3" 1664 | type: "Dropout" 1665 | bottom: "conv0_3" 1666 | top: "conv0_3" 1667 | dropout_param { 1668 | dropout_ratio: 0.2 1669 | } 1670 | } 1671 | layer { 1672 | name: "batch_conv0_3" 1673 | type: "BatchNorm" 1674 | bottom: "conv0_3" 1675 | top: "conv0_3" 1676 | param { lr_mult: 0 } 1677 | param { lr_mult: 0 } 1678 | param { lr_mult: 0 } 1679 | } 1680 | layer { 1681 | name: "relu_conv0_3" 1682 | type: "ReLU" 1683 | bottom: "conv0_3" 1684 | top: "conv0_3" 1685 | } 1686 | layer { 1687 | bottom: "conv0_3" 1688 | 
top: "pool0_3" 1689 | name: "pool0_3" 1690 | type: "Pooling" 1691 | pooling_param { 1692 | pool: MAX 1693 | kernel_size: 2 1694 | stride: 2 1695 | } 1696 | } 1697 | layer { 1698 | bottom: "conv1_7" 1699 | top: "pool1_7" 1700 | name: "pool1_7" 1701 | type: "Pooling" 1702 | pooling_param { 1703 | pool: MAX 1704 | kernel_size: 2 1705 | stride: 2 1706 | } 1707 | } 1708 | layer { 1709 | bottom: "conv2_15" 1710 | top: "pool2_15" 1711 | name: "pool2_15" 1712 | type: "Pooling" 1713 | pooling_param { 1714 | pool: MAX 1715 | kernel_size: 2 1716 | stride: 2 1717 | } 1718 | } 1719 | layer { 1720 | name: "join_pool2_15_plus" 1721 | type: "FractalJoin" 1722 | bottom: "pool0_3" 1723 | bottom: "pool1_7" 1724 | bottom: "pool2_15" 1725 | top: "pool2_15_plus" 1726 | fractal_join_param { 1727 | drop_path_ratio: 0.15 1728 | } 1729 | } 1730 | # Reduction: 3, spatial size: 4 1731 | layer { 1732 | bottom: "pool2_15_plus" 1733 | top: "conv2_16" 1734 | name: "conv2_16" 1735 | param { 1736 | lr_mult: 1 1737 | decay_mult: 1 1738 | name: "conv2_16" 1739 | } 1740 | param { 1741 | lr_mult: 2 1742 | decay_mult: 0 1743 | name: "conv2_16_b" 1744 | } 1745 | type: "Convolution" 1746 | convolution_param { 1747 | num_output: 512 1748 | pad: 1 1749 | kernel_size: 3 1750 | weight_filler { 1751 | type: "xavier" 1752 | } 1753 | bias_filler { 1754 | type: "constant" 1755 | } 1756 | } 1757 | } 1758 | layer { 1759 | name: "dropout_conv2_16" 1760 | type: "Dropout" 1761 | bottom: "conv2_16" 1762 | top: "conv2_16" 1763 | dropout_param { 1764 | dropout_ratio: 0.3 1765 | } 1766 | } 1767 | layer { 1768 | name: "batch_conv2_16" 1769 | type: "BatchNorm" 1770 | bottom: "conv2_16" 1771 | top: "conv2_16" 1772 | param { lr_mult: 0 } 1773 | param { lr_mult: 0 } 1774 | param { lr_mult: 0 } 1775 | } 1776 | layer { 1777 | name: "relu_conv2_16" 1778 | type: "ReLU" 1779 | bottom: "conv2_16" 1780 | top: "conv2_16" 1781 | } 1782 | layer { 1783 | bottom: "conv2_16" 1784 | top: "conv2_17" 1785 | name: "conv2_17" 1786 | param { 
1787 | lr_mult: 1 1788 | decay_mult: 1 1789 | name: "conv2_17" 1790 | } 1791 | param { 1792 | lr_mult: 2 1793 | decay_mult: 0 1794 | name: "conv2_17_b" 1795 | } 1796 | type: "Convolution" 1797 | convolution_param { 1798 | num_output: 512 1799 | pad: 1 1800 | kernel_size: 3 1801 | weight_filler { 1802 | type: "xavier" 1803 | } 1804 | bias_filler { 1805 | type: "constant" 1806 | } 1807 | } 1808 | } 1809 | layer { 1810 | name: "dropout_conv2_17" 1811 | type: "Dropout" 1812 | bottom: "conv2_17" 1813 | top: "conv2_17" 1814 | dropout_param { 1815 | dropout_ratio: 0.3 1816 | } 1817 | } 1818 | layer { 1819 | name: "batch_conv2_17" 1820 | type: "BatchNorm" 1821 | bottom: "conv2_17" 1822 | top: "conv2_17" 1823 | param { lr_mult: 0 } 1824 | param { lr_mult: 0 } 1825 | param { lr_mult: 0 } 1826 | } 1827 | layer { 1828 | name: "relu_conv2_17" 1829 | type: "ReLU" 1830 | bottom: "conv2_17" 1831 | top: "conv2_17" 1832 | } 1833 | layer { 1834 | bottom: "pool2_15_plus" 1835 | top: "conv1_8" 1836 | name: "conv1_8" 1837 | param { 1838 | lr_mult: 1 1839 | decay_mult: 1 1840 | name: "conv1_8" 1841 | } 1842 | param { 1843 | lr_mult: 2 1844 | decay_mult: 0 1845 | name: "conv1_8_b" 1846 | } 1847 | type: "Convolution" 1848 | convolution_param { 1849 | num_output: 512 1850 | pad: 1 1851 | kernel_size: 3 1852 | weight_filler { 1853 | type: "xavier" 1854 | } 1855 | bias_filler { 1856 | type: "constant" 1857 | } 1858 | } 1859 | } 1860 | layer { 1861 | name: "dropout_conv1_8" 1862 | type: "Dropout" 1863 | bottom: "conv1_8" 1864 | top: "conv1_8" 1865 | dropout_param { 1866 | dropout_ratio: 0.3 1867 | } 1868 | } 1869 | layer { 1870 | name: "batch_conv1_8" 1871 | type: "BatchNorm" 1872 | bottom: "conv1_8" 1873 | top: "conv1_8" 1874 | param { lr_mult: 0 } 1875 | param { lr_mult: 0 } 1876 | param { lr_mult: 0 } 1877 | } 1878 | layer { 1879 | name: "relu_conv1_8" 1880 | type: "ReLU" 1881 | bottom: "conv1_8" 1882 | top: "conv1_8" 1883 | } 1884 | layer { 1885 | name: "join_conv2_17_plus" 1886 | type: 
"FractalJoin" 1887 | bottom: "conv1_8" 1888 | bottom: "conv2_17" 1889 | top: "conv2_17_plus" 1890 | fractal_join_param { 1891 | drop_path_ratio: 0.15 1892 | } 1893 | } 1894 | layer { 1895 | bottom: "conv2_17_plus" 1896 | top: "conv2_18" 1897 | name: "conv2_18" 1898 | param { 1899 | lr_mult: 1 1900 | decay_mult: 1 1901 | name: "conv2_18" 1902 | } 1903 | param { 1904 | lr_mult: 2 1905 | decay_mult: 0 1906 | name: "conv2_18_b" 1907 | } 1908 | type: "Convolution" 1909 | convolution_param { 1910 | num_output: 512 1911 | pad: 1 1912 | kernel_size: 3 1913 | weight_filler { 1914 | type: "xavier" 1915 | } 1916 | bias_filler { 1917 | type: "constant" 1918 | } 1919 | } 1920 | } 1921 | layer { 1922 | name: "dropout_conv2_18" 1923 | type: "Dropout" 1924 | bottom: "conv2_18" 1925 | top: "conv2_18" 1926 | dropout_param { 1927 | dropout_ratio: 0.3 1928 | } 1929 | } 1930 | layer { 1931 | name: "batch_conv2_18" 1932 | type: "BatchNorm" 1933 | bottom: "conv2_18" 1934 | top: "conv2_18" 1935 | param { lr_mult: 0 } 1936 | param { lr_mult: 0 } 1937 | param { lr_mult: 0 } 1938 | } 1939 | layer { 1940 | name: "relu_conv2_18" 1941 | type: "ReLU" 1942 | bottom: "conv2_18" 1943 | top: "conv2_18" 1944 | } 1945 | layer { 1946 | bottom: "conv2_18" 1947 | top: "conv2_19" 1948 | name: "conv2_19" 1949 | param { 1950 | lr_mult: 1 1951 | decay_mult: 1 1952 | name: "conv2_19" 1953 | } 1954 | param { 1955 | lr_mult: 2 1956 | decay_mult: 0 1957 | name: "conv2_19_b" 1958 | } 1959 | type: "Convolution" 1960 | convolution_param { 1961 | num_output: 512 1962 | pad: 1 1963 | kernel_size: 3 1964 | weight_filler { 1965 | type: "xavier" 1966 | } 1967 | bias_filler { 1968 | type: "constant" 1969 | } 1970 | } 1971 | } 1972 | layer { 1973 | name: "dropout_conv2_19" 1974 | type: "Dropout" 1975 | bottom: "conv2_19" 1976 | top: "conv2_19" 1977 | dropout_param { 1978 | dropout_ratio: 0.3 1979 | } 1980 | } 1981 | layer { 1982 | name: "batch_conv2_19" 1983 | type: "BatchNorm" 1984 | bottom: "conv2_19" 1985 | top: 
"conv2_19" 1986 | param { lr_mult: 0 } 1987 | param { lr_mult: 0 } 1988 | param { lr_mult: 0 } 1989 | } 1990 | layer { 1991 | name: "relu_conv2_19" 1992 | type: "ReLU" 1993 | bottom: "conv2_19" 1994 | top: "conv2_19" 1995 | } 1996 | layer { 1997 | bottom: "conv2_17_plus" 1998 | top: "conv1_9" 1999 | name: "conv1_9" 2000 | param { 2001 | lr_mult: 1 2002 | decay_mult: 1 2003 | name: "conv1_9" 2004 | } 2005 | param { 2006 | lr_mult: 2 2007 | decay_mult: 0 2008 | name: "conv1_9_b" 2009 | } 2010 | type: "Convolution" 2011 | convolution_param { 2012 | num_output: 512 2013 | pad: 1 2014 | kernel_size: 3 2015 | weight_filler { 2016 | type: "xavier" 2017 | } 2018 | bias_filler { 2019 | type: "constant" 2020 | } 2021 | } 2022 | } 2023 | layer { 2024 | name: "dropout_conv1_9" 2025 | type: "Dropout" 2026 | bottom: "conv1_9" 2027 | top: "conv1_9" 2028 | dropout_param { 2029 | dropout_ratio: 0.3 2030 | } 2031 | } 2032 | layer { 2033 | name: "batch_conv1_9" 2034 | type: "BatchNorm" 2035 | bottom: "conv1_9" 2036 | top: "conv1_9" 2037 | param { lr_mult: 0 } 2038 | param { lr_mult: 0 } 2039 | param { lr_mult: 0 } 2040 | } 2041 | layer { 2042 | name: "relu_conv1_9" 2043 | type: "ReLU" 2044 | bottom: "conv1_9" 2045 | top: "conv1_9" 2046 | } 2047 | layer { 2048 | bottom: "pool2_15_plus" 2049 | top: "conv0_4" 2050 | name: "conv0_4" 2051 | param { 2052 | lr_mult: 1 2053 | decay_mult: 1 2054 | name: "conv0_4" 2055 | } 2056 | param { 2057 | lr_mult: 2 2058 | decay_mult: 0 2059 | name: "conv0_4_b" 2060 | } 2061 | type: "Convolution" 2062 | convolution_param { 2063 | num_output: 512 2064 | pad: 1 2065 | kernel_size: 3 2066 | weight_filler { 2067 | type: "xavier" 2068 | } 2069 | bias_filler { 2070 | type: "constant" 2071 | } 2072 | } 2073 | } 2074 | layer { 2075 | name: "dropout_conv0_4" 2076 | type: "Dropout" 2077 | bottom: "conv0_4" 2078 | top: "conv0_4" 2079 | dropout_param { 2080 | dropout_ratio: 0.3 2081 | } 2082 | } 2083 | layer { 2084 | name: "batch_conv0_4" 2085 | type: "BatchNorm" 
2086 | bottom: "conv0_4" 2087 | top: "conv0_4" 2088 | param { lr_mult: 0 } 2089 | param { lr_mult: 0 } 2090 | param { lr_mult: 0 } 2091 | } 2092 | layer { 2093 | name: "relu_conv0_4" 2094 | type: "ReLU" 2095 | bottom: "conv0_4" 2096 | top: "conv0_4" 2097 | } 2098 | layer { 2099 | bottom: "conv0_4" 2100 | top: "pool0_4" 2101 | name: "pool0_4" 2102 | type: "Pooling" 2103 | pooling_param { 2104 | pool: MAX 2105 | kernel_size: 2 2106 | stride: 2 2107 | } 2108 | } 2109 | layer { 2110 | bottom: "conv1_9" 2111 | top: "pool1_9" 2112 | name: "pool1_9" 2113 | type: "Pooling" 2114 | pooling_param { 2115 | pool: MAX 2116 | kernel_size: 2 2117 | stride: 2 2118 | } 2119 | } 2120 | layer { 2121 | bottom: "conv2_19" 2122 | top: "pool2_19" 2123 | name: "pool2_19" 2124 | type: "Pooling" 2125 | pooling_param { 2126 | pool: MAX 2127 | kernel_size: 2 2128 | stride: 2 2129 | } 2130 | } 2131 | layer { 2132 | name: "join_pool2_19_plus" 2133 | type: "FractalJoin" 2134 | bottom: "pool0_4" 2135 | bottom: "pool1_9" 2136 | bottom: "pool2_19" 2137 | top: "pool2_19_plus" 2138 | fractal_join_param { 2139 | drop_path_ratio: 0.15 2140 | } 2141 | } 2142 | # Reduction: 5, spatial size: 1 2143 | layer { 2144 | bottom: "extra_mid_join" 2145 | top: "conv2_20" 2146 | name: "conv2_20" 2147 | param { 2148 | lr_mult: 1 2149 | decay_mult: 1 2150 | name: "conv2_20" 2151 | } 2152 | param { 2153 | lr_mult: 2 2154 | decay_mult: 0 2155 | name: "conv2_20_b" 2156 | } 2157 | type: "Convolution" 2158 | convolution_param { 2159 | num_output: 512 2160 | pad: 1 2161 | kernel_size: 3 2162 | weight_filler { 2163 | type: "xavier" 2164 | } 2165 | bias_filler { 2166 | type: "constant" 2167 | } 2168 | } 2169 | } 2170 | layer { 2171 | name: "dropout_conv2_20" 2172 | type: "Dropout" 2173 | bottom: "conv2_20" 2174 | top: "conv2_20" 2175 | dropout_param { 2176 | dropout_ratio: 0.2 2177 | } 2178 | } 2179 | layer { 2180 | name: "batch_conv2_20" 2181 | type: "BatchNorm" 2182 | bottom: "conv2_20" 2183 | top: "conv2_20" 2184 | param { 
lr_mult: 0 } 2185 | param { lr_mult: 0 } 2186 | param { lr_mult: 0 } 2187 | } 2188 | layer { 2189 | name: "relu_conv2_20" 2190 | type: "ReLU" 2191 | bottom: "conv2_20" 2192 | top: "conv2_20" 2193 | } 2194 | layer { 2195 | bottom: "conv2_20" 2196 | top: "conv2_21" 2197 | name: "conv2_21" 2198 | param { 2199 | lr_mult: 1 2200 | decay_mult: 1 2201 | name: "conv2_21" 2202 | } 2203 | param { 2204 | lr_mult: 2 2205 | decay_mult: 0 2206 | name: "conv2_21_b" 2207 | } 2208 | type: "Convolution" 2209 | convolution_param { 2210 | num_output: 512 2211 | pad: 1 2212 | kernel_size: 3 2213 | weight_filler { 2214 | type: "xavier" 2215 | } 2216 | bias_filler { 2217 | type: "constant" 2218 | } 2219 | } 2220 | } 2221 | layer { 2222 | name: "dropout_conv2_21" 2223 | type: "Dropout" 2224 | bottom: "conv2_21" 2225 | top: "conv2_21" 2226 | dropout_param { 2227 | dropout_ratio: 0.2 2228 | } 2229 | } 2230 | layer { 2231 | name: "batch_conv2_21" 2232 | type: "BatchNorm" 2233 | bottom: "conv2_21" 2234 | top: "conv2_21" 2235 | param { lr_mult: 0 } 2236 | param { lr_mult: 0 } 2237 | param { lr_mult: 0 } 2238 | } 2239 | layer { 2240 | name: "relu_conv2_21" 2241 | type: "ReLU" 2242 | bottom: "conv2_21" 2243 | top: "conv2_21" 2244 | } 2245 | layer { 2246 | bottom: "extra_mid_join" 2247 | top: "conv1_10" 2248 | name: "conv1_10" 2249 | param { 2250 | lr_mult: 1 2251 | decay_mult: 1 2252 | name: "conv1_10" 2253 | } 2254 | param { 2255 | lr_mult: 2 2256 | decay_mult: 0 2257 | name: "conv1_10_b" 2258 | } 2259 | type: "Convolution" 2260 | convolution_param { 2261 | num_output: 512 2262 | pad: 1 2263 | kernel_size: 3 2264 | weight_filler { 2265 | type: "xavier" 2266 | } 2267 | bias_filler { 2268 | type: "constant" 2269 | } 2270 | } 2271 | } 2272 | layer { 2273 | name: "dropout_conv1_10" 2274 | type: "Dropout" 2275 | bottom: "conv1_10" 2276 | top: "conv1_10" 2277 | dropout_param { 2278 | dropout_ratio: 0.2 2279 | } 2280 | } 2281 | layer { 2282 | name: "batch_conv1_10" 2283 | type: "BatchNorm" 2284 | 
bottom: "conv1_10" 2285 | top: "conv1_10" 2286 | param { lr_mult: 0 } 2287 | param { lr_mult: 0 } 2288 | param { lr_mult: 0 } 2289 | } 2290 | layer { 2291 | name: "relu_conv1_10" 2292 | type: "ReLU" 2293 | bottom: "conv1_10" 2294 | top: "conv1_10" 2295 | } 2296 | layer { 2297 | name: "join_conv2_21_plus" 2298 | type: "FractalJoin" 2299 | bottom: "conv1_10" 2300 | bottom: "conv2_21" 2301 | top: "conv2_21_plus" 2302 | fractal_join_param { 2303 | drop_path_ratio: 0.15 2304 | } 2305 | } 2306 | layer { 2307 | bottom: "conv2_21_plus" 2308 | top: "conv2_22" 2309 | name: "conv2_22" 2310 | param { 2311 | lr_mult: 1 2312 | decay_mult: 1 2313 | name: "conv2_22" 2314 | } 2315 | param { 2316 | lr_mult: 2 2317 | decay_mult: 0 2318 | name: "conv2_22_b" 2319 | } 2320 | type: "Convolution" 2321 | convolution_param { 2322 | num_output: 512 2323 | pad: 1 2324 | kernel_size: 3 2325 | weight_filler { 2326 | type: "xavier" 2327 | } 2328 | bias_filler { 2329 | type: "constant" 2330 | } 2331 | } 2332 | } 2333 | layer { 2334 | name: "dropout_conv2_22" 2335 | type: "Dropout" 2336 | bottom: "conv2_22" 2337 | top: "conv2_22" 2338 | dropout_param { 2339 | dropout_ratio: 0.2 2340 | } 2341 | } 2342 | layer { 2343 | name: "batch_conv2_22" 2344 | type: "BatchNorm" 2345 | bottom: "conv2_22" 2346 | top: "conv2_22" 2347 | param { lr_mult: 0 } 2348 | param { lr_mult: 0 } 2349 | param { lr_mult: 0 } 2350 | } 2351 | layer { 2352 | name: "relu_conv2_22" 2353 | type: "ReLU" 2354 | bottom: "conv2_22" 2355 | top: "conv2_22" 2356 | } 2357 | layer { 2358 | bottom: "conv2_22" 2359 | top: "conv2_23" 2360 | name: "conv2_23" 2361 | param { 2362 | lr_mult: 1 2363 | decay_mult: 1 2364 | name: "conv2_23" 2365 | } 2366 | param { 2367 | lr_mult: 2 2368 | decay_mult: 0 2369 | name: "conv2_23_b" 2370 | } 2371 | type: "Convolution" 2372 | convolution_param { 2373 | num_output: 512 2374 | pad: 1 2375 | kernel_size: 3 2376 | weight_filler { 2377 | type: "xavier" 2378 | } 2379 | bias_filler { 2380 | type: "constant" 2381 | 
} 2382 | } 2383 | } 2384 | layer { 2385 | name: "dropout_conv2_23" 2386 | type: "Dropout" 2387 | bottom: "conv2_23" 2388 | top: "conv2_23" 2389 | dropout_param { 2390 | dropout_ratio: 0.2 2391 | } 2392 | } 2393 | layer { 2394 | name: "batch_conv2_23" 2395 | type: "BatchNorm" 2396 | bottom: "conv2_23" 2397 | top: "conv2_23" 2398 | param { lr_mult: 0 } 2399 | param { lr_mult: 0 } 2400 | param { lr_mult: 0 } 2401 | } 2402 | layer { 2403 | name: "relu_conv2_23" 2404 | type: "ReLU" 2405 | bottom: "conv2_23" 2406 | top: "conv2_23" 2407 | } 2408 | layer { 2409 | bottom: "conv2_21_plus" 2410 | top: "conv1_11" 2411 | name: "conv1_11" 2412 | param { 2413 | lr_mult: 1 2414 | decay_mult: 1 2415 | name: "conv1_11" 2416 | } 2417 | param { 2418 | lr_mult: 2 2419 | decay_mult: 0 2420 | name: "conv1_11_b" 2421 | } 2422 | type: "Convolution" 2423 | convolution_param { 2424 | num_output: 512 2425 | pad: 1 2426 | kernel_size: 3 2427 | weight_filler { 2428 | type: "xavier" 2429 | } 2430 | bias_filler { 2431 | type: "constant" 2432 | } 2433 | } 2434 | } 2435 | layer { 2436 | name: "dropout_conv1_11" 2437 | type: "Dropout" 2438 | bottom: "conv1_11" 2439 | top: "conv1_11" 2440 | dropout_param { 2441 | dropout_ratio: 0.2 2442 | } 2443 | } 2444 | layer { 2445 | name: "batch_conv1_11" 2446 | type: "BatchNorm" 2447 | bottom: "conv1_11" 2448 | top: "conv1_11" 2449 | param { lr_mult: 0 } 2450 | param { lr_mult: 0 } 2451 | param { lr_mult: 0 } 2452 | } 2453 | layer { 2454 | name: "relu_conv1_11" 2455 | type: "ReLU" 2456 | bottom: "conv1_11" 2457 | top: "conv1_11" 2458 | } 2459 | layer { 2460 | bottom: "extra_mid_join" 2461 | top: "conv0_5" 2462 | name: "conv0_5" 2463 | param { 2464 | lr_mult: 1 2465 | decay_mult: 1 2466 | name: "conv0_5" 2467 | } 2468 | param { 2469 | lr_mult: 2 2470 | decay_mult: 0 2471 | name: "conv0_5_b" 2472 | } 2473 | type: "Convolution" 2474 | convolution_param { 2475 | num_output: 512 2476 | pad: 1 2477 | kernel_size: 3 2478 | weight_filler { 2479 | type: "xavier" 2480 | 
} 2481 | bias_filler { 2482 | type: "constant" 2483 | } 2484 | } 2485 | } 2486 | layer { 2487 | name: "dropout_conv0_5" 2488 | type: "Dropout" 2489 | bottom: "conv0_5" 2490 | top: "conv0_5" 2491 | dropout_param { 2492 | dropout_ratio: 0.2 2493 | } 2494 | } 2495 | layer { 2496 | name: "batch_conv0_5" 2497 | type: "BatchNorm" 2498 | bottom: "conv0_5" 2499 | top: "conv0_5" 2500 | param { lr_mult: 0 } 2501 | param { lr_mult: 0 } 2502 | param { lr_mult: 0 } 2503 | } 2504 | layer { 2505 | name: "relu_conv0_5" 2506 | type: "ReLU" 2507 | bottom: "conv0_5" 2508 | top: "conv0_5" 2509 | } 2510 | layer { 2511 | bottom: "conv0_5" 2512 | top: "pool0_5_0" 2513 | name: "pool0_5_0" 2514 | type: "Pooling" 2515 | pooling_param { 2516 | pool: MAX 2517 | kernel_size: 2 2518 | stride: 2 2519 | } 2520 | } 2521 | layer { 2522 | bottom: "pool0_5_0" 2523 | top: "pool0_5_1" 2524 | name: "pool0_5_1" 2525 | type: "Pooling" 2526 | pooling_param { 2527 | pool: MAX 2528 | kernel_size: 2 2529 | stride: 2 2530 | } 2531 | } 2532 | layer { 2533 | bottom: "conv1_11" 2534 | top: "pool1_11_0" 2535 | name: "pool1_11_0" 2536 | type: "Pooling" 2537 | pooling_param { 2538 | pool: MAX 2539 | kernel_size: 2 2540 | stride: 2 2541 | } 2542 | } 2543 | layer { 2544 | bottom: "pool1_11_0" 2545 | top: "pool1_11_1" 2546 | name: "pool1_11_1" 2547 | type: "Pooling" 2548 | pooling_param { 2549 | pool: MAX 2550 | kernel_size: 2 2551 | stride: 2 2552 | } 2553 | } 2554 | layer { 2555 | bottom: "conv2_23" 2556 | top: "pool2_23_0" 2557 | name: "pool2_23_0" 2558 | type: "Pooling" 2559 | pooling_param { 2560 | pool: MAX 2561 | kernel_size: 2 2562 | stride: 2 2563 | } 2564 | } 2565 | layer { 2566 | bottom: "pool2_23_0" 2567 | top: "pool2_23_1" 2568 | name: "pool2_23_1" 2569 | type: "Pooling" 2570 | pooling_param { 2571 | pool: MAX 2572 | kernel_size: 2 2573 | stride: 2 2574 | } 2575 | } 2576 | layer { 2577 | name: "join_pool2_23_plus" 2578 | type: "FractalJoin" 2579 | bottom: "pool0_5_1" 2580 | bottom: "pool1_11_1" 2581 | 
bottom: "pool2_23_1" 2582 | top: "pool2_23_plus" 2583 | fractal_join_param { 2584 | drop_path_ratio: 0.15 2585 | } 2586 | } 2587 | # Reduction: 6, spatial size: 0 2588 | layer { 2589 | bottom: "data" 2590 | top: "conv2_24" 2591 | name: "conv2_24" 2592 | param { 2593 | lr_mult: 1 2594 | decay_mult: 1 2595 | name: "conv2_24" 2596 | } 2597 | param { 2598 | lr_mult: 2 2599 | decay_mult: 0 2600 | name: "conv2_24_b" 2601 | } 2602 | type: "Convolution" 2603 | convolution_param { 2604 | num_output: 512 2605 | pad: 1 2606 | kernel_size: 3 2607 | weight_filler { 2608 | type: "xavier" 2609 | } 2610 | bias_filler { 2611 | type: "constant" 2612 | } 2613 | } 2614 | } 2615 | layer { 2616 | name: "dropout_conv2_24" 2617 | type: "Dropout" 2618 | bottom: "conv2_24" 2619 | top: "conv2_24" 2620 | dropout_param { 2621 | dropout_ratio: 0.0 2622 | } 2623 | } 2624 | layer { 2625 | name: "batch_conv2_24" 2626 | type: "BatchNorm" 2627 | bottom: "conv2_24" 2628 | top: "conv2_24" 2629 | param { lr_mult: 0 } 2630 | param { lr_mult: 0 } 2631 | param { lr_mult: 0 } 2632 | } 2633 | layer { 2634 | name: "relu_conv2_24" 2635 | type: "ReLU" 2636 | bottom: "conv2_24" 2637 | top: "conv2_24" 2638 | } 2639 | layer { 2640 | bottom: "conv2_24" 2641 | top: "conv2_25" 2642 | name: "conv2_25" 2643 | param { 2644 | lr_mult: 1 2645 | decay_mult: 1 2646 | name: "conv2_25" 2647 | } 2648 | param { 2649 | lr_mult: 2 2650 | decay_mult: 0 2651 | name: "conv2_25_b" 2652 | } 2653 | type: "Convolution" 2654 | convolution_param { 2655 | num_output: 512 2656 | pad: 1 2657 | kernel_size: 3 2658 | weight_filler { 2659 | type: "xavier" 2660 | } 2661 | bias_filler { 2662 | type: "constant" 2663 | } 2664 | } 2665 | } 2666 | layer { 2667 | name: "dropout_conv2_25" 2668 | type: "Dropout" 2669 | bottom: "conv2_25" 2670 | top: "conv2_25" 2671 | dropout_param { 2672 | dropout_ratio: 0.0 2673 | } 2674 | } 2675 | layer { 2676 | name: "batch_conv2_25" 2677 | type: "BatchNorm" 2678 | bottom: "conv2_25" 2679 | top: "conv2_25" 2680 | 
param { lr_mult: 0 } 2681 | param { lr_mult: 0 } 2682 | param { lr_mult: 0 } 2683 | } 2684 | layer { 2685 | name: "relu_conv2_25" 2686 | type: "ReLU" 2687 | bottom: "conv2_25" 2688 | top: "conv2_25" 2689 | } 2690 | layer { 2691 | bottom: "data" 2692 | top: "conv1_12" 2693 | name: "conv1_12" 2694 | param { 2695 | lr_mult: 1 2696 | decay_mult: 1 2697 | name: "conv1_12" 2698 | } 2699 | param { 2700 | lr_mult: 2 2701 | decay_mult: 0 2702 | name: "conv1_12_b" 2703 | } 2704 | type: "Convolution" 2705 | convolution_param { 2706 | num_output: 512 2707 | pad: 1 2708 | kernel_size: 3 2709 | weight_filler { 2710 | type: "xavier" 2711 | } 2712 | bias_filler { 2713 | type: "constant" 2714 | } 2715 | } 2716 | } 2717 | layer { 2718 | name: "dropout_conv1_12" 2719 | type: "Dropout" 2720 | bottom: "conv1_12" 2721 | top: "conv1_12" 2722 | dropout_param { 2723 | dropout_ratio: 0.0 2724 | } 2725 | } 2726 | layer { 2727 | name: "batch_conv1_12" 2728 | type: "BatchNorm" 2729 | bottom: "conv1_12" 2730 | top: "conv1_12" 2731 | param { lr_mult: 0 } 2732 | param { lr_mult: 0 } 2733 | param { lr_mult: 0 } 2734 | } 2735 | layer { 2736 | name: "relu_conv1_12" 2737 | type: "ReLU" 2738 | bottom: "conv1_12" 2739 | top: "conv1_12" 2740 | } 2741 | layer { 2742 | name: "join_conv2_25_plus" 2743 | type: "FractalJoin" 2744 | bottom: "conv1_12" 2745 | bottom: "conv2_25" 2746 | top: "conv2_25_plus" 2747 | fractal_join_param { 2748 | drop_path_ratio: 0.15 2749 | } 2750 | } 2751 | layer { 2752 | bottom: "conv2_25_plus" 2753 | top: "conv2_26" 2754 | name: "conv2_26" 2755 | param { 2756 | lr_mult: 1 2757 | decay_mult: 1 2758 | name: "conv2_26" 2759 | } 2760 | param { 2761 | lr_mult: 2 2762 | decay_mult: 0 2763 | name: "conv2_26_b" 2764 | } 2765 | type: "Convolution" 2766 | convolution_param { 2767 | num_output: 512 2768 | pad: 1 2769 | kernel_size: 3 2770 | weight_filler { 2771 | type: "xavier" 2772 | } 2773 | bias_filler { 2774 | type: "constant" 2775 | } 2776 | } 2777 | } 2778 | layer { 2779 | name: 
"dropout_conv2_26" 2780 | type: "Dropout" 2781 | bottom: "conv2_26" 2782 | top: "conv2_26" 2783 | dropout_param { 2784 | dropout_ratio: 0.0 2785 | } 2786 | } 2787 | layer { 2788 | name: "batch_conv2_26" 2789 | type: "BatchNorm" 2790 | bottom: "conv2_26" 2791 | top: "conv2_26" 2792 | param { lr_mult: 0 } 2793 | param { lr_mult: 0 } 2794 | param { lr_mult: 0 } 2795 | } 2796 | layer { 2797 | name: "relu_conv2_26" 2798 | type: "ReLU" 2799 | bottom: "conv2_26" 2800 | top: "conv2_26" 2801 | } 2802 | layer { 2803 | bottom: "conv2_26" 2804 | top: "conv2_27" 2805 | name: "conv2_27" 2806 | param { 2807 | lr_mult: 1 2808 | decay_mult: 1 2809 | name: "conv2_27" 2810 | } 2811 | param { 2812 | lr_mult: 2 2813 | decay_mult: 0 2814 | name: "conv2_27_b" 2815 | } 2816 | type: "Convolution" 2817 | convolution_param { 2818 | num_output: 512 2819 | pad: 1 2820 | kernel_size: 3 2821 | weight_filler { 2822 | type: "xavier" 2823 | } 2824 | bias_filler { 2825 | type: "constant" 2826 | } 2827 | } 2828 | } 2829 | layer { 2830 | name: "dropout_conv2_27" 2831 | type: "Dropout" 2832 | bottom: "conv2_27" 2833 | top: "conv2_27" 2834 | dropout_param { 2835 | dropout_ratio: 0.0 2836 | } 2837 | } 2838 | layer { 2839 | name: "batch_conv2_27" 2840 | type: "BatchNorm" 2841 | bottom: "conv2_27" 2842 | top: "conv2_27" 2843 | param { lr_mult: 0 } 2844 | param { lr_mult: 0 } 2845 | param { lr_mult: 0 } 2846 | } 2847 | layer { 2848 | name: "relu_conv2_27" 2849 | type: "ReLU" 2850 | bottom: "conv2_27" 2851 | top: "conv2_27" 2852 | } 2853 | layer { 2854 | bottom: "conv2_25_plus" 2855 | top: "conv1_13" 2856 | name: "conv1_13" 2857 | param { 2858 | lr_mult: 1 2859 | decay_mult: 1 2860 | name: "conv1_13" 2861 | } 2862 | param { 2863 | lr_mult: 2 2864 | decay_mult: 0 2865 | name: "conv1_13_b" 2866 | } 2867 | type: "Convolution" 2868 | convolution_param { 2869 | num_output: 512 2870 | pad: 1 2871 | kernel_size: 3 2872 | weight_filler { 2873 | type: "xavier" 2874 | } 2875 | bias_filler { 2876 | type: "constant" 
2877 | } 2878 | } 2879 | } 2880 | layer { 2881 | name: "dropout_conv1_13" 2882 | type: "Dropout" 2883 | bottom: "conv1_13" 2884 | top: "conv1_13" 2885 | dropout_param { 2886 | dropout_ratio: 0.0 2887 | } 2888 | } 2889 | layer { 2890 | name: "batch_conv1_13" 2891 | type: "BatchNorm" 2892 | bottom: "conv1_13" 2893 | top: "conv1_13" 2894 | param { lr_mult: 0 } 2895 | param { lr_mult: 0 } 2896 | param { lr_mult: 0 } 2897 | } 2898 | layer { 2899 | name: "relu_conv1_13" 2900 | type: "ReLU" 2901 | bottom: "conv1_13" 2902 | top: "conv1_13" 2903 | } 2904 | layer { 2905 | bottom: "data" 2906 | top: "conv0_6" 2907 | name: "conv0_6" 2908 | param { 2909 | lr_mult: 1 2910 | decay_mult: 1 2911 | name: "conv0_6" 2912 | } 2913 | param { 2914 | lr_mult: 2 2915 | decay_mult: 0 2916 | name: "conv0_6_b" 2917 | } 2918 | type: "Convolution" 2919 | convolution_param { 2920 | num_output: 512 2921 | pad: 1 2922 | kernel_size: 3 2923 | weight_filler { 2924 | type: "xavier" 2925 | } 2926 | bias_filler { 2927 | type: "constant" 2928 | } 2929 | } 2930 | } 2931 | layer { 2932 | name: "dropout_conv0_6" 2933 | type: "Dropout" 2934 | bottom: "conv0_6" 2935 | top: "conv0_6" 2936 | dropout_param { 2937 | dropout_ratio: 0.0 2938 | } 2939 | } 2940 | layer { 2941 | name: "batch_conv0_6" 2942 | type: "BatchNorm" 2943 | bottom: "conv0_6" 2944 | top: "conv0_6" 2945 | param { lr_mult: 0 } 2946 | param { lr_mult: 0 } 2947 | param { lr_mult: 0 } 2948 | } 2949 | layer { 2950 | name: "relu_conv0_6" 2951 | type: "ReLU" 2952 | bottom: "conv0_6" 2953 | top: "conv0_6" 2954 | } 2955 | layer { 2956 | bottom: "conv0_6" 2957 | top: "pool0_6_0" 2958 | name: "pool0_6_0" 2959 | type: "Pooling" 2960 | pooling_param { 2961 | pool: MAX 2962 | kernel_size: 2 2963 | stride: 2 2964 | } 2965 | } 2966 | layer { 2967 | bottom: "pool0_6_0" 2968 | top: "pool0_6_1" 2969 | name: "pool0_6_1" 2970 | type: "Pooling" 2971 | pooling_param { 2972 | pool: MAX 2973 | kernel_size: 2 2974 | stride: 2 2975 | } 2976 | } 2977 | layer { 2978 | 
bottom: "pool0_6_1" 2979 | top: "pool0_6_2" 2980 | name: "pool0_6_2" 2981 | type: "Pooling" 2982 | pooling_param { 2983 | pool: MAX 2984 | kernel_size: 2 2985 | stride: 2 2986 | } 2987 | } 2988 | layer { 2989 | bottom: "pool0_6_2" 2990 | top: "pool0_6_3" 2991 | name: "pool0_6_3" 2992 | type: "Pooling" 2993 | pooling_param { 2994 | pool: MAX 2995 | kernel_size: 2 2996 | stride: 2 2997 | } 2998 | } 2999 | layer { 3000 | bottom: "conv1_13" 3001 | top: "pool1_13_0" 3002 | name: "pool1_13_0" 3003 | type: "Pooling" 3004 | pooling_param { 3005 | pool: MAX 3006 | kernel_size: 2 3007 | stride: 2 3008 | } 3009 | } 3010 | layer { 3011 | bottom: "pool1_13_0" 3012 | top: "pool1_13_1" 3013 | name: "pool1_13_1" 3014 | type: "Pooling" 3015 | pooling_param { 3016 | pool: MAX 3017 | kernel_size: 2 3018 | stride: 2 3019 | } 3020 | } 3021 | layer { 3022 | bottom: "pool1_13_1" 3023 | top: "pool1_13_2" 3024 | name: "pool1_13_2" 3025 | type: "Pooling" 3026 | pooling_param { 3027 | pool: MAX 3028 | kernel_size: 2 3029 | stride: 2 3030 | } 3031 | } 3032 | layer { 3033 | bottom: "pool1_13_2" 3034 | top: "pool1_13_3" 3035 | name: "pool1_13_3" 3036 | type: "Pooling" 3037 | pooling_param { 3038 | pool: MAX 3039 | kernel_size: 2 3040 | stride: 2 3041 | } 3042 | } 3043 | layer { 3044 | bottom: "conv2_27" 3045 | top: "pool2_27_0" 3046 | name: "pool2_27_0" 3047 | type: "Pooling" 3048 | pooling_param { 3049 | pool: MAX 3050 | kernel_size: 2 3051 | stride: 2 3052 | } 3053 | } 3054 | layer { 3055 | bottom: "pool2_27_0" 3056 | top: "pool2_27_1" 3057 | name: "pool2_27_1" 3058 | type: "Pooling" 3059 | pooling_param { 3060 | pool: MAX 3061 | kernel_size: 2 3062 | stride: 2 3063 | } 3064 | } 3065 | layer { 3066 | bottom: "pool2_27_1" 3067 | top: "pool2_27_2" 3068 | name: "pool2_27_2" 3069 | type: "Pooling" 3070 | pooling_param { 3071 | pool: MAX 3072 | kernel_size: 2 3073 | stride: 2 3074 | } 3075 | } 3076 | layer { 3077 | bottom: "pool2_27_2" 3078 | top: "pool2_27_3" 3079 | name: "pool2_27_3" 3080 | 
type: "Pooling" 3081 | pooling_param { 3082 | pool: MAX 3083 | kernel_size: 2 3084 | stride: 2 3085 | } 3086 | } 3087 | layer { 3088 | name: "join_pool2_27_plus" 3089 | type: "FractalJoin" 3090 | bottom: "pool0_6_3" 3091 | bottom: "pool1_13_3" 3092 | bottom: "pool2_27_3" 3093 | top: "pool2_27_plus" 3094 | fractal_join_param { 3095 | drop_path_ratio: 0.15 3096 | } 3097 | } 3098 | layer { 3099 | name: "join_extra_mid_join2" 3100 | type: "FractalJoin" 3101 | bottom: "pool2_23_plus" 3102 | bottom: "pool2_19_plus" 3103 | # bottom: "pool2_27_plus" 3104 | top: "extra_mid_join2a" 3105 | fractal_join_param { 3106 | drop_path_ratio: 0.0 3107 | } 3108 | } 3109 | layer { 3110 | name: "squared2_23" 3111 | type: "Eltwise" 3112 | bottom: "extra_mid_join2a" 3113 | bottom: "extra_mid_join2a" 3114 | top: "squared2_23" 3115 | eltwise_param { 3116 | operation: PROD 3117 | } 3118 | } 3119 | layer { 3120 | name: "freeze_drop_path" 3121 | type: "FreezeDropPath" 3122 | bottom: "pool2_27_plus" 3123 | bottom: "squared2_23" 3124 | # bottom: "cubed2_19" 3125 | top: "extra_mid_join2" 3126 | freeze_drop_path_param { 3127 | num_iter_per_cycle: 0 3128 | interval_type: 0 3129 | } 3130 | } 3131 | layer { 3132 | name: "batch_extra_mid_join2" 3133 | type: "BatchNorm" 3134 | bottom: "extra_mid_join2" 3135 | top: "extra_mid_join2" 3136 | param { lr_mult: 0 } 3137 | param { lr_mult: 0 } 3138 | param { lr_mult: 0 } 3139 | } 3140 | 3141 | # Reduction: 7, spatial size: 0 3142 | layer { 3143 | name: "prediction0" 3144 | type: "InnerProduct" 3145 | bottom: "extra_mid_join2" 3146 | top: "prediction0" 3147 | param { 3148 | lr_mult: 1 3149 | decay_mult: 1 3150 | name: "prediction0" 3151 | } 3152 | param { 3153 | lr_mult: 2 3154 | decay_mult: 0 3155 | name: "prediction0_b" 3156 | } 3157 | inner_product_param { 3158 | num_output: 10 3159 | weight_filler { 3160 | type: "xavier" 3161 | } 3162 | bias_filler { 3163 | type: "constant" 3164 | } 3165 | } 3166 | } 3167 | layer { 3168 | name: "loss0" 3169 | type: 
"SoftmaxWithLoss" 3170 | bottom: "prediction0" 3171 | bottom: "label" 3172 | top: "loss0" 3173 | loss_weight: 1.0 3174 | include: { phase: TRAIN } 3175 | } 3176 | 3177 | layer { 3178 | name: "accuracy_loss0" 3179 | type: "Accuracy" 3180 | bottom: "prediction0" 3181 | bottom: "label" 3182 | top: "accuracy_loss0" 3183 | include: { phase: TEST } 3184 | } 3185 | -------------------------------------------------------------------------------- /freeze_drop_path.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "caffe/layers/freeze_drop_path.hpp" 4 | #include "caffe/util/math_functions.hpp" 5 | 6 | namespace caffe { 7 | 8 | template 9 | void FreezeDropPathLayer::LayerSetUp(const vector*>& bottom, 10 | const vector*>& top) { 11 | Dtype num_iter_per_cycle = this->layer_param_.freeze_drop_path_param().num_iter_per_cycle(); 12 | Dtype interval_type = this->layer_param_.freeze_drop_path_param().interval_type(); 13 | 14 | thresholds_.clear(); 15 | uint_thresholds_.clear(); 16 | stochastic = false; 17 | if ( num_iter_per_cycle == 0) stochastic = true; 18 | iteration = 0; 19 | lastBranch = 0; 20 | 21 | Dtype interval[20]; 22 | Dtype norm = 0.0; 23 | interval[0] = 0.0; 24 | Dtype x; 25 | if (interval_type == 0) { 26 | for (int i = 1; i <= bottom.size(); ++i) { 27 | x = pow(Dtype(i),2.0); 28 | interval[i] += interval[i-1] + x; 29 | norm += x; 30 | } 31 | } else if(interval_type == 1) { 32 | for (int i = 1; i <= bottom.size(); ++i) { 33 | interval[i] = i; 34 | } 35 | norm = bottom.size(); 36 | } else if(interval_type == 2) { 37 | for (int i = 0; i <= bottom.size(); ++i) { 38 | x = pow(Dtype(bottom.size()-i),2.0); 39 | interval[i] += interval[i] + x; 40 | norm += x; 41 | } 42 | } else { 43 | // Not recognized 44 | LOG(ERROR) << " interval_type " << interval_type << " not recognized. 
"; 45 | } 46 | 47 | 48 | // LOG(INFO) << " num_iter_per_cycle = " << num_iter_per_cycle << " bottom.size = " << bottom.size() << " norm = " << norm; 49 | 50 | 51 | if (stochastic) { 52 | for (int i = 0; i <= bottom.size(); ++i) { 53 | thresholds_.push_back(interval[i]/norm); 54 | DCHECK(thresholds_[i] >= 0.); 55 | DCHECK(thresholds_[i] <= 1.); 56 | uint_thresholds_.push_back(static_cast(interval[i])); 57 | } 58 | } else { 59 | for (int i = 0; i <= bottom.size(); ++i) { 60 | x = Dtype(num_iter_per_cycle * interval[i])/norm; 61 | thresholds_.push_back(x); 62 | } 63 | } 64 | } 65 | 66 | template 67 | void FreezeDropPathLayer::Reshape(const vector*>& bottom, 68 | const vector*>& top) { 69 | for (int i = 1; i < bottom.size(); ++i) { 70 | CHECK(bottom[i]->shape() == bottom[0]->shape()); 71 | } 72 | top[0]->ReshapeLike(*bottom[0]); 73 | } 74 | 75 | template 76 | void FreezeDropPathLayer::Forward_cpu(const vector*>& bottom, 77 | const vector*>& top) { 78 | if (this->phase_ == TRAIN) { 79 | branch = 0; 80 | if (stochastic) { 81 | uint rnd = std::rand() % uint_thresholds_[bottom.size()]; 82 | for (int i = 0; i < bottom.size(); ++i) { 83 | if (rnd >= uint_thresholds_[i] && rnd < uint_thresholds_[i+1]) branch = i; 84 | } 85 | } else { 86 | iteration = (iteration + 1) % this->layer_param_.freeze_drop_path_param().num_iter_per_cycle(); 87 | for (int i = 0; i < bottom.size(); ++i) { 88 | if (iteration >= thresholds_[i] && iteration < thresholds_[i+1]) branch = i; 89 | } 90 | if (branch > lastBranch || branch < lastBranch) 91 | LOG(INFO) << " Changing from branch " << lastBranch << " to " << branch; 92 | } 93 | } else { 94 | branch = bottom.size() - 1; 95 | } 96 | 97 | // Zero the top layer before adding the bottom data 98 | caffe_set(top[0]->count(), Dtype(0), top[0]->mutable_cpu_data()); 99 | for (int i = 0; i <= branch; ++i) { 100 | caffe_axpy(top[0]->count(), Dtype(1.0), bottom[i]->cpu_data(), top[0]->mutable_cpu_data()); 101 | } 102 | lastBranch = branch; 103 | } 104 | 105 | 
template 106 | void FreezeDropPathLayer::Backward_cpu(const vector*>& top, 107 | const vector& propagate_down, const vector*>& bottom) { 108 | 109 | for (int i = 0; i < bottom.size(); ++i) { 110 | if (propagate_down[i]) { 111 | if (i == branch) { 112 | caffe_copy(bottom[i]->count(), top[0]->cpu_diff(), bottom[i]->mutable_cpu_diff()); 113 | } else { 114 | caffe_set(bottom[i]->count(), Dtype(0), bottom[i]->mutable_cpu_diff()); 115 | } 116 | } 117 | } 118 | } 119 | 120 | #ifdef CPU_ONLY 121 | STUB_GPU(FreezeDropPathLayer); 122 | #endif 123 | 124 | INSTANTIATE_CLASS(FreezeDropPathLayer); 125 | REGISTER_LAYER_CLASS(FreezeDropPath); 126 | 127 | } // namespace caffe 128 | 129 | -------------------------------------------------------------------------------- /freeze_drop_path.cu: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "caffe/layers/freeze_drop_path.hpp" 4 | #include "caffe/util/math_functions.hpp" 5 | #include /* srand, rand */ 6 | 7 | namespace caffe { 8 | 9 | template 10 | void FreezeDropPathLayer::Forward_gpu(const vector*>& bottom, 11 | const vector*>& top) { 12 | if (this->phase_ == TRAIN) { 13 | branch = 0; 14 | if (stochastic) { 15 | uint rnd = std::rand() % uint_thresholds_[bottom.size()]; 16 | for (int i = 0; i < bottom.size(); ++i) { 17 | if (rnd >= uint_thresholds_[i] && rnd < uint_thresholds_[i+1]) branch = i; 18 | } 19 | // LOG(INFO) << " rnd = " << rnd << "branch " << branch; 20 | } else { 21 | iteration = (iteration + 1) % this->layer_param_.freeze_drop_path_param().num_iter_per_cycle(); 22 | for (int i = 0; i < bottom.size(); ++i) { 23 | if (iteration >= thresholds_[i] && iteration < thresholds_[i+1]) branch = i; 24 | } 25 | if (branch > lastBranch || branch < lastBranch) 26 | LOG(INFO) << " Changing from branch " << lastBranch << " to " << branch; 27 | // LOG(INFO) << " iteration = " << iteration << " bottom.size = " << bottom.size() << "branch " << branch; 28 | } 29 | } else { 30 | 
branch = bottom.size() - 1; 31 | } 32 | 33 | // Zero the top layer before adding the bottom data 34 | caffe_gpu_set(top[0]->count(), Dtype(0), top[0]->mutable_cpu_data()); 35 | for (int i = 0; i <= branch; ++i) { 36 | caffe_gpu_axpy(top[0]->count(), Dtype(1.0), bottom[i]->cpu_data(), top[0]->mutable_cpu_data()); 37 | } 38 | lastBranch = branch; 39 | } 40 | 41 | template 42 | void FreezeDropPathLayer::Backward_gpu(const vector*>& top, 43 | const vector& propagate_down, const vector*>& bottom) { 44 | for (int i = 0; i < bottom.size(); ++i) { 45 | if (propagate_down[i]) { 46 | if (i == branch) { 47 | caffe_copy(bottom[i]->count(), top[0]->cpu_diff(), bottom[i]->mutable_cpu_diff()); 48 | } else { 49 | caffe_gpu_set(bottom[i]->count(), Dtype(0), bottom[i]->mutable_cpu_diff()); 50 | } 51 | } 52 | } 53 | } 54 | 55 | INSTANTIATE_LAYER_GPU_FUNCS(FreezeDropPathLayer); 56 | 57 | } // namespace caffe 58 | -------------------------------------------------------------------------------- /freeze_drop_path.hpp: -------------------------------------------------------------------------------- 1 | #ifndef CAFFE_FREEZE_DROP_PATH_HPP_ 2 | #define CAFFE_FREEZE_DROP_PATH_HPP_ 3 | 4 | #include 5 | 6 | #include "caffe/blob.hpp" 7 | #include "caffe/layer.hpp" 8 | #include "caffe/proto/caffe.pb.h" 9 | 10 | namespace caffe { 11 | 12 | /** 13 | * \ingroup ttic 14 | * @brief Only one path is active at a time during training. Higher paths, if any, are 15 | * frozen and lower paths are dropped. 16 | * This is used with several network branches, where the lower branches 17 | * learn a correction to the upper branches results. 18 | * 19 | * @author Leslie N. 
Smith 20 | */ 21 | template 22 | class FreezeDropPathLayer : public Layer { 23 | public: 24 | explicit FreezeDropPathLayer(const LayerParameter& param) 25 | : Layer(param) {} 26 | virtual void LayerSetUp(const vector*>& bottom, 27 | const vector*>& top); 28 | virtual void Reshape(const vector*>& bottom, 29 | const vector*>& top); 30 | 31 | virtual inline const char* type() const { return "FreezeDropPathLayer"; } 32 | virtual inline int MinBottomBlobs() const { return 2; } 33 | virtual inline int ExactNumTopBlobs() const { return 1; } 34 | 35 | protected: 36 | virtual void Forward_cpu(const vector*>& bottom, 37 | const vector*>& top); 38 | virtual void Forward_gpu(const vector*>& bottom, 39 | const vector*>& top); 40 | virtual void Backward_cpu(const vector*>& top, 41 | const vector& propagate_down, const vector*>& bottom); 42 | virtual void Backward_gpu(const vector*>& top, 43 | const vector& propagate_down, const vector*>& bottom); 44 | 45 | // std::vector drops_; 46 | unsigned int iteration, branch, lastBranch; 47 | bool stochastic; 48 | std::vector thresholds_; 49 | std::vector uint_thresholds_; 50 | }; 51 | 52 | } // namespace caffe 53 | 54 | #endif // CAFFE_FREEZE_DROP_PATH_HPP_ 55 | -------------------------------------------------------------------------------- /solver.prototxt: -------------------------------------------------------------------------------- 1 | net: "examples/fractalnet/architectures/fractal.prototxt" 2 | test_iter: 200 3 | test_interval: 500 4 | display: 250 5 | max_iter: 100000 6 | base_lr: 0.02 7 | lr_policy: "multistep" 8 | gamma: 0.1 9 | momentum: 0.9 10 | weight_decay: 0.0001 11 | snapshot: 50000 12 | snapshot_prefix: "examples/fractalnet/snapshots/fractal" 13 | solver_mode: GPU 14 | random_seed: 831486 15 | stepvalue: 50000 16 | stepvalue: 75000 17 | stepvalue: 87500 18 | --------------------------------------------------------------------------------