├── README.md ├── caff_src.model.prototxt ├── caffe_model.zip └── yolov0825.prototxt /README.md: -------------------------------------------------------------------------------- 1 | ## 鉴于经常还是有人问我尽管有模型,但是还是不知道怎么修改工程,我传上之前的整个测试工程,仅供参考. 2 | 3 | # Hisi_YoLoV5 4 | 海思nnie跑yolov5 5 | 6 | ## 模型修改。 7 | ### 参考官方的yolov5 4.0版本 8 | + git clone -b v4.0 https://github.com/ultralytics/yolov5.git/ 9 | 10 | ### 修改处0, models/yolov5s.yaml 增加如下: 11 | 12 | + 删除 ~~[[-1, 1, Focus, [64, 3]], # 0-P1/2 ~~ 13 | + 添加 [[-1, 1, Conv, [64, 3, 2]], # 0-P1/2 14 | 15 | ### 修改处1, utils/activations.py 增加如下: 16 | class ReLU(nn.Module): 17 | @staticmethod 18 | def forward(x): 19 | return nn.ReLU(x) 20 | 21 | ### 修改处2,models/common.py 35行修改激活函数 22 | + ~~self.act = nn.SiLU() if act is True else (act if isinstance(act, nn.Module) else nn.Identity())~~ 23 | 24 | + 添加 self.act = nn.ReLU() if act is True else (act if isinstance(act, nn.Module) else nn.Identity()) 25 | 26 | 27 | ### 修改处3, models/common.py 98行修改maxPool ,修改ceil_mode方式 28 | + self.m = nn.ModuleList([nn.MaxPool2d(kernel_size=x, stride=1, padding=x // 2, ceil_mode = True) for x in k]) 29 | 30 | ### 修改处4, models/yolov5s.yaml 31 | + ~~[-1, 1, nn.Upsample, [None, 2, 'nearest']],~~ 32 | 33 | + 添加 [-1, 1, nn.ConvTranspose2d, [256,256, 2, 2]], 34 | 35 | ### 修改处5, models/yolov5s.yaml 36 | + ~~[-1, 1, nn.Upsample, [None, 2, 'nearest']],~~ 37 | 38 | + 添加 [-1, 1, nn.ConvTranspose2d, [128 ,128, 2, 2]], 39 | 40 | ### 转换ONNX修改: models/export.py , 修改了opset_version 41 | + torch.onnx.export(model, img, f, verbose=False, opset_version=10, input_names=['images']... 
42 | 43 | ### 参考 https://github.com/Wulingtian/yolov5_caffe 44 | + 将ONNX 转为caffe模型。 45 | 46 | ### 最后caffe模型中的Permute和Reshape层,nnie不支持,去掉Permute层(一共有三层),Reshape改为需要的输出。 47 | -------------------------------------------------------------------------------- /caff_src.model.prototxt: -------------------------------------------------------------------------------- 1 | layer { 2 | name: "images" 3 | type: "Input" 4 | top: "images" 5 | input_param { 6 | shape { 7 | dim: 1 8 | dim: 3 9 | dim: 640 10 | dim: 640 11 | } 12 | } 13 | } 14 | layer { 15 | name: "Conv_0" 16 | type: "Convolution" 17 | bottom: "images" 18 | top: "131" 19 | convolution_param { 20 | num_output: 32 21 | bias_term: true 22 | group: 1 23 | pad_h: 1 24 | pad_w: 1 25 | kernel_h: 3 26 | kernel_w: 3 27 | stride_h: 2 28 | stride_w: 2 29 | dilation: 1 30 | } 31 | } 32 | layer { 33 | name: "Relu_1" 34 | type: "ReLU" 35 | bottom: "131" 36 | top: "132" 37 | } 38 | layer { 39 | name: "Conv_2" 40 | type: "Convolution" 41 | bottom: "132" 42 | top: "133" 43 | convolution_param { 44 | num_output: 64 45 | bias_term: true 46 | group: 1 47 | pad_h: 1 48 | pad_w: 1 49 | kernel_h: 3 50 | kernel_w: 3 51 | stride_h: 2 52 | stride_w: 2 53 | dilation: 1 54 | } 55 | } 56 | layer { 57 | name: "Relu_3" 58 | type: "ReLU" 59 | bottom: "133" 60 | top: "134" 61 | } 62 | layer { 63 | name: "Conv_4" 64 | type: "Convolution" 65 | bottom: "134" 66 | top: "135" 67 | convolution_param { 68 | num_output: 32 69 | bias_term: true 70 | group: 1 71 | pad_h: 0 72 | pad_w: 0 73 | kernel_h: 1 74 | kernel_w: 1 75 | stride_h: 1 76 | stride_w: 1 77 | dilation: 1 78 | } 79 | } 80 | layer { 81 | name: "Relu_5" 82 | type: "ReLU" 83 | bottom: "135" 84 | top: "136" 85 | } 86 | layer { 87 | name: "Conv_6" 88 | type: "Convolution" 89 | bottom: "136" 90 | top: "137" 91 | convolution_param { 92 | num_output: 32 93 | bias_term: true 94 | group: 1 95 | pad_h: 0 96 | pad_w: 0 97 | kernel_h: 1 98 | kernel_w: 1 99 | stride_h: 1 100 | stride_w: 1 101 | dilation: 1 
102 | } 103 | } 104 | layer { 105 | name: "Relu_7" 106 | type: "ReLU" 107 | bottom: "137" 108 | top: "138" 109 | } 110 | layer { 111 | name: "Conv_8" 112 | type: "Convolution" 113 | bottom: "138" 114 | top: "139" 115 | convolution_param { 116 | num_output: 32 117 | bias_term: true 118 | group: 1 119 | pad_h: 1 120 | pad_w: 1 121 | kernel_h: 3 122 | kernel_w: 3 123 | stride_h: 1 124 | stride_w: 1 125 | dilation: 1 126 | } 127 | } 128 | layer { 129 | name: "Relu_9" 130 | type: "ReLU" 131 | bottom: "139" 132 | top: "140" 133 | } 134 | layer { 135 | name: "Add_10" 136 | type: "Eltwise" 137 | bottom: "136" 138 | bottom: "140" 139 | top: "141" 140 | eltwise_param { 141 | operation: SUM 142 | } 143 | } 144 | layer { 145 | name: "Conv_11" 146 | type: "Convolution" 147 | bottom: "134" 148 | top: "142" 149 | convolution_param { 150 | num_output: 32 151 | bias_term: true 152 | group: 1 153 | pad_h: 0 154 | pad_w: 0 155 | kernel_h: 1 156 | kernel_w: 1 157 | stride_h: 1 158 | stride_w: 1 159 | dilation: 1 160 | } 161 | } 162 | layer { 163 | name: "Relu_12" 164 | type: "ReLU" 165 | bottom: "142" 166 | top: "143" 167 | } 168 | layer { 169 | name: "Concat_13" 170 | type: "Concat" 171 | bottom: "141" 172 | bottom: "143" 173 | top: "144" 174 | concat_param { 175 | axis: 1 176 | } 177 | } 178 | layer { 179 | name: "Conv_14" 180 | type: "Convolution" 181 | bottom: "144" 182 | top: "145" 183 | convolution_param { 184 | num_output: 64 185 | bias_term: true 186 | group: 1 187 | pad_h: 0 188 | pad_w: 0 189 | kernel_h: 1 190 | kernel_w: 1 191 | stride_h: 1 192 | stride_w: 1 193 | dilation: 1 194 | } 195 | } 196 | layer { 197 | name: "Relu_15" 198 | type: "ReLU" 199 | bottom: "145" 200 | top: "146" 201 | } 202 | layer { 203 | name: "Conv_16" 204 | type: "Convolution" 205 | bottom: "146" 206 | top: "147" 207 | convolution_param { 208 | num_output: 128 209 | bias_term: true 210 | group: 1 211 | pad_h: 1 212 | pad_w: 1 213 | kernel_h: 3 214 | kernel_w: 3 215 | stride_h: 2 216 | stride_w: 2 217 
| dilation: 1 218 | } 219 | } 220 | layer { 221 | name: "Relu_17" 222 | type: "ReLU" 223 | bottom: "147" 224 | top: "148" 225 | } 226 | layer { 227 | name: "Conv_18" 228 | type: "Convolution" 229 | bottom: "148" 230 | top: "149" 231 | convolution_param { 232 | num_output: 64 233 | bias_term: true 234 | group: 1 235 | pad_h: 0 236 | pad_w: 0 237 | kernel_h: 1 238 | kernel_w: 1 239 | stride_h: 1 240 | stride_w: 1 241 | dilation: 1 242 | } 243 | } 244 | layer { 245 | name: "Relu_19" 246 | type: "ReLU" 247 | bottom: "149" 248 | top: "150" 249 | } 250 | layer { 251 | name: "Conv_20" 252 | type: "Convolution" 253 | bottom: "150" 254 | top: "151" 255 | convolution_param { 256 | num_output: 64 257 | bias_term: true 258 | group: 1 259 | pad_h: 0 260 | pad_w: 0 261 | kernel_h: 1 262 | kernel_w: 1 263 | stride_h: 1 264 | stride_w: 1 265 | dilation: 1 266 | } 267 | } 268 | layer { 269 | name: "Relu_21" 270 | type: "ReLU" 271 | bottom: "151" 272 | top: "152" 273 | } 274 | layer { 275 | name: "Conv_22" 276 | type: "Convolution" 277 | bottom: "152" 278 | top: "153" 279 | convolution_param { 280 | num_output: 64 281 | bias_term: true 282 | group: 1 283 | pad_h: 1 284 | pad_w: 1 285 | kernel_h: 3 286 | kernel_w: 3 287 | stride_h: 1 288 | stride_w: 1 289 | dilation: 1 290 | } 291 | } 292 | layer { 293 | name: "Relu_23" 294 | type: "ReLU" 295 | bottom: "153" 296 | top: "154" 297 | } 298 | layer { 299 | name: "Add_24" 300 | type: "Eltwise" 301 | bottom: "150" 302 | bottom: "154" 303 | top: "155" 304 | eltwise_param { 305 | operation: SUM 306 | } 307 | } 308 | layer { 309 | name: "Conv_25" 310 | type: "Convolution" 311 | bottom: "155" 312 | top: "156" 313 | convolution_param { 314 | num_output: 64 315 | bias_term: true 316 | group: 1 317 | pad_h: 0 318 | pad_w: 0 319 | kernel_h: 1 320 | kernel_w: 1 321 | stride_h: 1 322 | stride_w: 1 323 | dilation: 1 324 | } 325 | } 326 | layer { 327 | name: "Relu_26" 328 | type: "ReLU" 329 | bottom: "156" 330 | top: "157" 331 | } 332 | layer { 333 | 
name: "Conv_27" 334 | type: "Convolution" 335 | bottom: "157" 336 | top: "158" 337 | convolution_param { 338 | num_output: 64 339 | bias_term: true 340 | group: 1 341 | pad_h: 1 342 | pad_w: 1 343 | kernel_h: 3 344 | kernel_w: 3 345 | stride_h: 1 346 | stride_w: 1 347 | dilation: 1 348 | } 349 | } 350 | layer { 351 | name: "Relu_28" 352 | type: "ReLU" 353 | bottom: "158" 354 | top: "159" 355 | } 356 | layer { 357 | name: "Add_29" 358 | type: "Eltwise" 359 | bottom: "155" 360 | bottom: "159" 361 | top: "160" 362 | eltwise_param { 363 | operation: SUM 364 | } 365 | } 366 | layer { 367 | name: "Conv_30" 368 | type: "Convolution" 369 | bottom: "160" 370 | top: "161" 371 | convolution_param { 372 | num_output: 64 373 | bias_term: true 374 | group: 1 375 | pad_h: 0 376 | pad_w: 0 377 | kernel_h: 1 378 | kernel_w: 1 379 | stride_h: 1 380 | stride_w: 1 381 | dilation: 1 382 | } 383 | } 384 | layer { 385 | name: "Relu_31" 386 | type: "ReLU" 387 | bottom: "161" 388 | top: "162" 389 | } 390 | layer { 391 | name: "Conv_32" 392 | type: "Convolution" 393 | bottom: "162" 394 | top: "163" 395 | convolution_param { 396 | num_output: 64 397 | bias_term: true 398 | group: 1 399 | pad_h: 1 400 | pad_w: 1 401 | kernel_h: 3 402 | kernel_w: 3 403 | stride_h: 1 404 | stride_w: 1 405 | dilation: 1 406 | } 407 | } 408 | layer { 409 | name: "Relu_33" 410 | type: "ReLU" 411 | bottom: "163" 412 | top: "164" 413 | } 414 | layer { 415 | name: "Add_34" 416 | type: "Eltwise" 417 | bottom: "160" 418 | bottom: "164" 419 | top: "165" 420 | eltwise_param { 421 | operation: SUM 422 | } 423 | } 424 | layer { 425 | name: "Conv_35" 426 | type: "Convolution" 427 | bottom: "148" 428 | top: "166" 429 | convolution_param { 430 | num_output: 64 431 | bias_term: true 432 | group: 1 433 | pad_h: 0 434 | pad_w: 0 435 | kernel_h: 1 436 | kernel_w: 1 437 | stride_h: 1 438 | stride_w: 1 439 | dilation: 1 440 | } 441 | } 442 | layer { 443 | name: "Relu_36" 444 | type: "ReLU" 445 | bottom: "166" 446 | top: "167" 447 | 
} 448 | layer { 449 | name: "Concat_37" 450 | type: "Concat" 451 | bottom: "165" 452 | bottom: "167" 453 | top: "168" 454 | concat_param { 455 | axis: 1 456 | } 457 | } 458 | layer { 459 | name: "Conv_38" 460 | type: "Convolution" 461 | bottom: "168" 462 | top: "169" 463 | convolution_param { 464 | num_output: 128 465 | bias_term: true 466 | group: 1 467 | pad_h: 0 468 | pad_w: 0 469 | kernel_h: 1 470 | kernel_w: 1 471 | stride_h: 1 472 | stride_w: 1 473 | dilation: 1 474 | } 475 | } 476 | layer { 477 | name: "Relu_39" 478 | type: "ReLU" 479 | bottom: "169" 480 | top: "170" 481 | } 482 | layer { 483 | name: "Conv_40" 484 | type: "Convolution" 485 | bottom: "170" 486 | top: "171" 487 | convolution_param { 488 | num_output: 256 489 | bias_term: true 490 | group: 1 491 | pad_h: 1 492 | pad_w: 1 493 | kernel_h: 3 494 | kernel_w: 3 495 | stride_h: 2 496 | stride_w: 2 497 | dilation: 1 498 | } 499 | } 500 | layer { 501 | name: "Relu_41" 502 | type: "ReLU" 503 | bottom: "171" 504 | top: "172" 505 | } 506 | layer { 507 | name: "Conv_42" 508 | type: "Convolution" 509 | bottom: "172" 510 | top: "173" 511 | convolution_param { 512 | num_output: 128 513 | bias_term: true 514 | group: 1 515 | pad_h: 0 516 | pad_w: 0 517 | kernel_h: 1 518 | kernel_w: 1 519 | stride_h: 1 520 | stride_w: 1 521 | dilation: 1 522 | } 523 | } 524 | layer { 525 | name: "Relu_43" 526 | type: "ReLU" 527 | bottom: "173" 528 | top: "174" 529 | } 530 | layer { 531 | name: "Conv_44" 532 | type: "Convolution" 533 | bottom: "174" 534 | top: "175" 535 | convolution_param { 536 | num_output: 128 537 | bias_term: true 538 | group: 1 539 | pad_h: 0 540 | pad_w: 0 541 | kernel_h: 1 542 | kernel_w: 1 543 | stride_h: 1 544 | stride_w: 1 545 | dilation: 1 546 | } 547 | } 548 | layer { 549 | name: "Relu_45" 550 | type: "ReLU" 551 | bottom: "175" 552 | top: "176" 553 | } 554 | layer { 555 | name: "Conv_46" 556 | type: "Convolution" 557 | bottom: "176" 558 | top: "177" 559 | convolution_param { 560 | num_output: 128 561 
| bias_term: true 562 | group: 1 563 | pad_h: 1 564 | pad_w: 1 565 | kernel_h: 3 566 | kernel_w: 3 567 | stride_h: 1 568 | stride_w: 1 569 | dilation: 1 570 | } 571 | } 572 | layer { 573 | name: "Relu_47" 574 | type: "ReLU" 575 | bottom: "177" 576 | top: "178" 577 | } 578 | layer { 579 | name: "Add_48" 580 | type: "Eltwise" 581 | bottom: "174" 582 | bottom: "178" 583 | top: "179" 584 | eltwise_param { 585 | operation: SUM 586 | } 587 | } 588 | layer { 589 | name: "Conv_49" 590 | type: "Convolution" 591 | bottom: "179" 592 | top: "180" 593 | convolution_param { 594 | num_output: 128 595 | bias_term: true 596 | group: 1 597 | pad_h: 0 598 | pad_w: 0 599 | kernel_h: 1 600 | kernel_w: 1 601 | stride_h: 1 602 | stride_w: 1 603 | dilation: 1 604 | } 605 | } 606 | layer { 607 | name: "Relu_50" 608 | type: "ReLU" 609 | bottom: "180" 610 | top: "181" 611 | } 612 | layer { 613 | name: "Conv_51" 614 | type: "Convolution" 615 | bottom: "181" 616 | top: "182" 617 | convolution_param { 618 | num_output: 128 619 | bias_term: true 620 | group: 1 621 | pad_h: 1 622 | pad_w: 1 623 | kernel_h: 3 624 | kernel_w: 3 625 | stride_h: 1 626 | stride_w: 1 627 | dilation: 1 628 | } 629 | } 630 | layer { 631 | name: "Relu_52" 632 | type: "ReLU" 633 | bottom: "182" 634 | top: "183" 635 | } 636 | layer { 637 | name: "Add_53" 638 | type: "Eltwise" 639 | bottom: "179" 640 | bottom: "183" 641 | top: "184" 642 | eltwise_param { 643 | operation: SUM 644 | } 645 | } 646 | layer { 647 | name: "Conv_54" 648 | type: "Convolution" 649 | bottom: "184" 650 | top: "185" 651 | convolution_param { 652 | num_output: 128 653 | bias_term: true 654 | group: 1 655 | pad_h: 0 656 | pad_w: 0 657 | kernel_h: 1 658 | kernel_w: 1 659 | stride_h: 1 660 | stride_w: 1 661 | dilation: 1 662 | } 663 | } 664 | layer { 665 | name: "Relu_55" 666 | type: "ReLU" 667 | bottom: "185" 668 | top: "186" 669 | } 670 | layer { 671 | name: "Conv_56" 672 | type: "Convolution" 673 | bottom: "186" 674 | top: "187" 675 | convolution_param { 
676 | num_output: 128 677 | bias_term: true 678 | group: 1 679 | pad_h: 1 680 | pad_w: 1 681 | kernel_h: 3 682 | kernel_w: 3 683 | stride_h: 1 684 | stride_w: 1 685 | dilation: 1 686 | } 687 | } 688 | layer { 689 | name: "Relu_57" 690 | type: "ReLU" 691 | bottom: "187" 692 | top: "188" 693 | } 694 | layer { 695 | name: "Add_58" 696 | type: "Eltwise" 697 | bottom: "184" 698 | bottom: "188" 699 | top: "189" 700 | eltwise_param { 701 | operation: SUM 702 | } 703 | } 704 | layer { 705 | name: "Conv_59" 706 | type: "Convolution" 707 | bottom: "172" 708 | top: "190" 709 | convolution_param { 710 | num_output: 128 711 | bias_term: true 712 | group: 1 713 | pad_h: 0 714 | pad_w: 0 715 | kernel_h: 1 716 | kernel_w: 1 717 | stride_h: 1 718 | stride_w: 1 719 | dilation: 1 720 | } 721 | } 722 | layer { 723 | name: "Relu_60" 724 | type: "ReLU" 725 | bottom: "190" 726 | top: "191" 727 | } 728 | layer { 729 | name: "Concat_61" 730 | type: "Concat" 731 | bottom: "189" 732 | bottom: "191" 733 | top: "192" 734 | concat_param { 735 | axis: 1 736 | } 737 | } 738 | layer { 739 | name: "Conv_62" 740 | type: "Convolution" 741 | bottom: "192" 742 | top: "193" 743 | convolution_param { 744 | num_output: 256 745 | bias_term: true 746 | group: 1 747 | pad_h: 0 748 | pad_w: 0 749 | kernel_h: 1 750 | kernel_w: 1 751 | stride_h: 1 752 | stride_w: 1 753 | dilation: 1 754 | } 755 | } 756 | layer { 757 | name: "Relu_63" 758 | type: "ReLU" 759 | bottom: "193" 760 | top: "194" 761 | } 762 | layer { 763 | name: "Conv_64" 764 | type: "Convolution" 765 | bottom: "194" 766 | top: "195" 767 | convolution_param { 768 | num_output: 512 769 | bias_term: true 770 | group: 1 771 | pad_h: 1 772 | pad_w: 1 773 | kernel_h: 3 774 | kernel_w: 3 775 | stride_h: 2 776 | stride_w: 2 777 | dilation: 1 778 | } 779 | } 780 | layer { 781 | name: "Relu_65" 782 | type: "ReLU" 783 | bottom: "195" 784 | top: "196" 785 | } 786 | layer { 787 | name: "Conv_66" 788 | type: "Convolution" 789 | bottom: "196" 790 | top: "197" 791 | 
convolution_param { 792 | num_output: 256 793 | bias_term: true 794 | group: 1 795 | pad_h: 0 796 | pad_w: 0 797 | kernel_h: 1 798 | kernel_w: 1 799 | stride_h: 1 800 | stride_w: 1 801 | dilation: 1 802 | } 803 | } 804 | layer { 805 | name: "Relu_67" 806 | type: "ReLU" 807 | bottom: "197" 808 | top: "198" 809 | } 810 | layer { 811 | name: "MaxPool_68" 812 | type: "Pooling" 813 | bottom: "198" 814 | top: "199" 815 | pooling_param { 816 | pool: MAX 817 | kernel_h: 5 818 | kernel_w: 5 819 | stride_h: 1 820 | stride_w: 1 821 | pad_h: 2 822 | pad_w: 2 823 | } 824 | } 825 | layer { 826 | name: "MaxPool_69" 827 | type: "Pooling" 828 | bottom: "198" 829 | top: "200" 830 | pooling_param { 831 | pool: MAX 832 | kernel_h: 9 833 | kernel_w: 9 834 | stride_h: 1 835 | stride_w: 1 836 | pad_h: 4 837 | pad_w: 4 838 | } 839 | } 840 | layer { 841 | name: "MaxPool_70" 842 | type: "Pooling" 843 | bottom: "198" 844 | top: "201" 845 | pooling_param { 846 | pool: MAX 847 | kernel_h: 13 848 | kernel_w: 13 849 | stride_h: 1 850 | stride_w: 1 851 | pad_h: 6 852 | pad_w: 6 853 | } 854 | } 855 | layer { 856 | name: "Concat_71" 857 | type: "Concat" 858 | bottom: "198" 859 | bottom: "199" 860 | bottom: "200" 861 | bottom: "201" 862 | top: "202" 863 | concat_param { 864 | axis: 1 865 | } 866 | } 867 | layer { 868 | name: "Conv_72" 869 | type: "Convolution" 870 | bottom: "202" 871 | top: "203" 872 | convolution_param { 873 | num_output: 512 874 | bias_term: true 875 | group: 1 876 | pad_h: 0 877 | pad_w: 0 878 | kernel_h: 1 879 | kernel_w: 1 880 | stride_h: 1 881 | stride_w: 1 882 | dilation: 1 883 | } 884 | } 885 | layer { 886 | name: "Relu_73" 887 | type: "ReLU" 888 | bottom: "203" 889 | top: "204" 890 | } 891 | layer { 892 | name: "Conv_74" 893 | type: "Convolution" 894 | bottom: "204" 895 | top: "205" 896 | convolution_param { 897 | num_output: 256 898 | bias_term: true 899 | group: 1 900 | pad_h: 0 901 | pad_w: 0 902 | kernel_h: 1 903 | kernel_w: 1 904 | stride_h: 1 905 | stride_w: 1 906 | 
dilation: 1 907 | } 908 | } 909 | layer { 910 | name: "Relu_75" 911 | type: "ReLU" 912 | bottom: "205" 913 | top: "206" 914 | } 915 | layer { 916 | name: "Conv_76" 917 | type: "Convolution" 918 | bottom: "206" 919 | top: "207" 920 | convolution_param { 921 | num_output: 256 922 | bias_term: true 923 | group: 1 924 | pad_h: 0 925 | pad_w: 0 926 | kernel_h: 1 927 | kernel_w: 1 928 | stride_h: 1 929 | stride_w: 1 930 | dilation: 1 931 | } 932 | } 933 | layer { 934 | name: "Relu_77" 935 | type: "ReLU" 936 | bottom: "207" 937 | top: "208" 938 | } 939 | layer { 940 | name: "Conv_78" 941 | type: "Convolution" 942 | bottom: "208" 943 | top: "209" 944 | convolution_param { 945 | num_output: 256 946 | bias_term: true 947 | group: 1 948 | pad_h: 1 949 | pad_w: 1 950 | kernel_h: 3 951 | kernel_w: 3 952 | stride_h: 1 953 | stride_w: 1 954 | dilation: 1 955 | } 956 | } 957 | layer { 958 | name: "Relu_79" 959 | type: "ReLU" 960 | bottom: "209" 961 | top: "210" 962 | } 963 | layer { 964 | name: "Conv_80" 965 | type: "Convolution" 966 | bottom: "204" 967 | top: "211" 968 | convolution_param { 969 | num_output: 256 970 | bias_term: true 971 | group: 1 972 | pad_h: 0 973 | pad_w: 0 974 | kernel_h: 1 975 | kernel_w: 1 976 | stride_h: 1 977 | stride_w: 1 978 | dilation: 1 979 | } 980 | } 981 | layer { 982 | name: "Relu_81" 983 | type: "ReLU" 984 | bottom: "211" 985 | top: "212" 986 | } 987 | layer { 988 | name: "Concat_82" 989 | type: "Concat" 990 | bottom: "210" 991 | bottom: "212" 992 | top: "213" 993 | concat_param { 994 | axis: 1 995 | } 996 | } 997 | layer { 998 | name: "Conv_83" 999 | type: "Convolution" 1000 | bottom: "213" 1001 | top: "214" 1002 | convolution_param { 1003 | num_output: 512 1004 | bias_term: true 1005 | group: 1 1006 | pad_h: 0 1007 | pad_w: 0 1008 | kernel_h: 1 1009 | kernel_w: 1 1010 | stride_h: 1 1011 | stride_w: 1 1012 | dilation: 1 1013 | } 1014 | } 1015 | layer { 1016 | name: "Relu_84" 1017 | type: "ReLU" 1018 | bottom: "214" 1019 | top: "215" 1020 | } 
1021 | layer { 1022 | name: "Conv_85" 1023 | type: "Convolution" 1024 | bottom: "215" 1025 | top: "216" 1026 | convolution_param { 1027 | num_output: 256 1028 | bias_term: true 1029 | group: 1 1030 | pad_h: 0 1031 | pad_w: 0 1032 | kernel_h: 1 1033 | kernel_w: 1 1034 | stride_h: 1 1035 | stride_w: 1 1036 | dilation: 1 1037 | } 1038 | } 1039 | layer { 1040 | name: "Relu_86" 1041 | type: "ReLU" 1042 | bottom: "216" 1043 | top: "217" 1044 | } 1045 | layer { 1046 | name: "ConvTranspose_87" 1047 | type: "Deconvolution" 1048 | bottom: "217" 1049 | top: "218" 1050 | convolution_param { 1051 | num_output: 256 1052 | bias_term: true 1053 | group: 1 1054 | pad_h: 0 1055 | pad_w: 0 1056 | kernel_h: 2 1057 | kernel_w: 2 1058 | stride_h: 2 1059 | stride_w: 2 1060 | } 1061 | } 1062 | layer { 1063 | name: "Concat_88" 1064 | type: "Concat" 1065 | bottom: "218" 1066 | bottom: "194" 1067 | top: "219" 1068 | concat_param { 1069 | axis: 1 1070 | } 1071 | } 1072 | layer { 1073 | name: "Conv_89" 1074 | type: "Convolution" 1075 | bottom: "219" 1076 | top: "220" 1077 | convolution_param { 1078 | num_output: 128 1079 | bias_term: true 1080 | group: 1 1081 | pad_h: 0 1082 | pad_w: 0 1083 | kernel_h: 1 1084 | kernel_w: 1 1085 | stride_h: 1 1086 | stride_w: 1 1087 | dilation: 1 1088 | } 1089 | } 1090 | layer { 1091 | name: "Relu_90" 1092 | type: "ReLU" 1093 | bottom: "220" 1094 | top: "221" 1095 | } 1096 | layer { 1097 | name: "Conv_91" 1098 | type: "Convolution" 1099 | bottom: "221" 1100 | top: "222" 1101 | convolution_param { 1102 | num_output: 128 1103 | bias_term: true 1104 | group: 1 1105 | pad_h: 0 1106 | pad_w: 0 1107 | kernel_h: 1 1108 | kernel_w: 1 1109 | stride_h: 1 1110 | stride_w: 1 1111 | dilation: 1 1112 | } 1113 | } 1114 | layer { 1115 | name: "Relu_92" 1116 | type: "ReLU" 1117 | bottom: "222" 1118 | top: "223" 1119 | } 1120 | layer { 1121 | name: "Conv_93" 1122 | type: "Convolution" 1123 | bottom: "223" 1124 | top: "224" 1125 | convolution_param { 1126 | num_output: 128 1127 | 
bias_term: true 1128 | group: 1 1129 | pad_h: 1 1130 | pad_w: 1 1131 | kernel_h: 3 1132 | kernel_w: 3 1133 | stride_h: 1 1134 | stride_w: 1 1135 | dilation: 1 1136 | } 1137 | } 1138 | layer { 1139 | name: "Relu_94" 1140 | type: "ReLU" 1141 | bottom: "224" 1142 | top: "225" 1143 | } 1144 | layer { 1145 | name: "Conv_95" 1146 | type: "Convolution" 1147 | bottom: "219" 1148 | top: "226" 1149 | convolution_param { 1150 | num_output: 128 1151 | bias_term: true 1152 | group: 1 1153 | pad_h: 0 1154 | pad_w: 0 1155 | kernel_h: 1 1156 | kernel_w: 1 1157 | stride_h: 1 1158 | stride_w: 1 1159 | dilation: 1 1160 | } 1161 | } 1162 | layer { 1163 | name: "Relu_96" 1164 | type: "ReLU" 1165 | bottom: "226" 1166 | top: "227" 1167 | } 1168 | layer { 1169 | name: "Concat_97" 1170 | type: "Concat" 1171 | bottom: "225" 1172 | bottom: "227" 1173 | top: "228" 1174 | concat_param { 1175 | axis: 1 1176 | } 1177 | } 1178 | layer { 1179 | name: "Conv_98" 1180 | type: "Convolution" 1181 | bottom: "228" 1182 | top: "229" 1183 | convolution_param { 1184 | num_output: 256 1185 | bias_term: true 1186 | group: 1 1187 | pad_h: 0 1188 | pad_w: 0 1189 | kernel_h: 1 1190 | kernel_w: 1 1191 | stride_h: 1 1192 | stride_w: 1 1193 | dilation: 1 1194 | } 1195 | } 1196 | layer { 1197 | name: "Relu_99" 1198 | type: "ReLU" 1199 | bottom: "229" 1200 | top: "230" 1201 | } 1202 | layer { 1203 | name: "Conv_100" 1204 | type: "Convolution" 1205 | bottom: "230" 1206 | top: "231" 1207 | convolution_param { 1208 | num_output: 128 1209 | bias_term: true 1210 | group: 1 1211 | pad_h: 0 1212 | pad_w: 0 1213 | kernel_h: 1 1214 | kernel_w: 1 1215 | stride_h: 1 1216 | stride_w: 1 1217 | dilation: 1 1218 | } 1219 | } 1220 | layer { 1221 | name: "Relu_101" 1222 | type: "ReLU" 1223 | bottom: "231" 1224 | top: "232" 1225 | } 1226 | layer { 1227 | name: "ConvTranspose_102" 1228 | type: "Deconvolution" 1229 | bottom: "232" 1230 | top: "233" 1231 | convolution_param { 1232 | num_output: 128 1233 | bias_term: true 1234 | group: 1 
1235 | pad_h: 0 1236 | pad_w: 0 1237 | kernel_h: 2 1238 | kernel_w: 2 1239 | stride_h: 2 1240 | stride_w: 2 1241 | } 1242 | } 1243 | layer { 1244 | name: "Concat_103" 1245 | type: "Concat" 1246 | bottom: "233" 1247 | bottom: "170" 1248 | top: "234" 1249 | concat_param { 1250 | axis: 1 1251 | } 1252 | } 1253 | layer { 1254 | name: "Conv_104" 1255 | type: "Convolution" 1256 | bottom: "234" 1257 | top: "235" 1258 | convolution_param { 1259 | num_output: 64 1260 | bias_term: true 1261 | group: 1 1262 | pad_h: 0 1263 | pad_w: 0 1264 | kernel_h: 1 1265 | kernel_w: 1 1266 | stride_h: 1 1267 | stride_w: 1 1268 | dilation: 1 1269 | } 1270 | } 1271 | layer { 1272 | name: "Relu_105" 1273 | type: "ReLU" 1274 | bottom: "235" 1275 | top: "236" 1276 | } 1277 | layer { 1278 | name: "Conv_106" 1279 | type: "Convolution" 1280 | bottom: "236" 1281 | top: "237" 1282 | convolution_param { 1283 | num_output: 64 1284 | bias_term: true 1285 | group: 1 1286 | pad_h: 0 1287 | pad_w: 0 1288 | kernel_h: 1 1289 | kernel_w: 1 1290 | stride_h: 1 1291 | stride_w: 1 1292 | dilation: 1 1293 | } 1294 | } 1295 | layer { 1296 | name: "Relu_107" 1297 | type: "ReLU" 1298 | bottom: "237" 1299 | top: "238" 1300 | } 1301 | layer { 1302 | name: "Conv_108" 1303 | type: "Convolution" 1304 | bottom: "238" 1305 | top: "239" 1306 | convolution_param { 1307 | num_output: 64 1308 | bias_term: true 1309 | group: 1 1310 | pad_h: 1 1311 | pad_w: 1 1312 | kernel_h: 3 1313 | kernel_w: 3 1314 | stride_h: 1 1315 | stride_w: 1 1316 | dilation: 1 1317 | } 1318 | } 1319 | layer { 1320 | name: "Relu_109" 1321 | type: "ReLU" 1322 | bottom: "239" 1323 | top: "240" 1324 | } 1325 | layer { 1326 | name: "Conv_110" 1327 | type: "Convolution" 1328 | bottom: "234" 1329 | top: "241" 1330 | convolution_param { 1331 | num_output: 64 1332 | bias_term: true 1333 | group: 1 1334 | pad_h: 0 1335 | pad_w: 0 1336 | kernel_h: 1 1337 | kernel_w: 1 1338 | stride_h: 1 1339 | stride_w: 1 1340 | dilation: 1 1341 | } 1342 | } 1343 | layer { 1344 | 
name: "Relu_111" 1345 | type: "ReLU" 1346 | bottom: "241" 1347 | top: "242" 1348 | } 1349 | layer { 1350 | name: "Concat_112" 1351 | type: "Concat" 1352 | bottom: "240" 1353 | bottom: "242" 1354 | top: "243" 1355 | concat_param { 1356 | axis: 1 1357 | } 1358 | } 1359 | layer { 1360 | name: "Conv_113" 1361 | type: "Convolution" 1362 | bottom: "243" 1363 | top: "244" 1364 | convolution_param { 1365 | num_output: 128 1366 | bias_term: true 1367 | group: 1 1368 | pad_h: 0 1369 | pad_w: 0 1370 | kernel_h: 1 1371 | kernel_w: 1 1372 | stride_h: 1 1373 | stride_w: 1 1374 | dilation: 1 1375 | } 1376 | } 1377 | layer { 1378 | name: "Relu_114" 1379 | type: "ReLU" 1380 | bottom: "244" 1381 | top: "245" 1382 | } 1383 | layer { 1384 | name: "Conv_115" 1385 | type: "Convolution" 1386 | bottom: "245" 1387 | top: "246" 1388 | convolution_param { 1389 | num_output: 128 1390 | bias_term: true 1391 | group: 1 1392 | pad_h: 1 1393 | pad_w: 1 1394 | kernel_h: 3 1395 | kernel_w: 3 1396 | stride_h: 2 1397 | stride_w: 2 1398 | dilation: 1 1399 | } 1400 | } 1401 | layer { 1402 | name: "Relu_116" 1403 | type: "ReLU" 1404 | bottom: "246" 1405 | top: "247" 1406 | } 1407 | layer { 1408 | name: "Concat_117" 1409 | type: "Concat" 1410 | bottom: "247" 1411 | bottom: "232" 1412 | top: "248" 1413 | concat_param { 1414 | axis: 1 1415 | } 1416 | } 1417 | layer { 1418 | name: "Conv_118" 1419 | type: "Convolution" 1420 | bottom: "248" 1421 | top: "249" 1422 | convolution_param { 1423 | num_output: 128 1424 | bias_term: true 1425 | group: 1 1426 | pad_h: 0 1427 | pad_w: 0 1428 | kernel_h: 1 1429 | kernel_w: 1 1430 | stride_h: 1 1431 | stride_w: 1 1432 | dilation: 1 1433 | } 1434 | } 1435 | layer { 1436 | name: "Relu_119" 1437 | type: "ReLU" 1438 | bottom: "249" 1439 | top: "250" 1440 | } 1441 | layer { 1442 | name: "Conv_120" 1443 | type: "Convolution" 1444 | bottom: "250" 1445 | top: "251" 1446 | convolution_param { 1447 | num_output: 128 1448 | bias_term: true 1449 | group: 1 1450 | pad_h: 0 1451 | 
pad_w: 0 1452 | kernel_h: 1 1453 | kernel_w: 1 1454 | stride_h: 1 1455 | stride_w: 1 1456 | dilation: 1 1457 | } 1458 | } 1459 | layer { 1460 | name: "Relu_121" 1461 | type: "ReLU" 1462 | bottom: "251" 1463 | top: "252" 1464 | } 1465 | layer { 1466 | name: "Conv_122" 1467 | type: "Convolution" 1468 | bottom: "252" 1469 | top: "253" 1470 | convolution_param { 1471 | num_output: 128 1472 | bias_term: true 1473 | group: 1 1474 | pad_h: 1 1475 | pad_w: 1 1476 | kernel_h: 3 1477 | kernel_w: 3 1478 | stride_h: 1 1479 | stride_w: 1 1480 | dilation: 1 1481 | } 1482 | } 1483 | layer { 1484 | name: "Relu_123" 1485 | type: "ReLU" 1486 | bottom: "253" 1487 | top: "254" 1488 | } 1489 | layer { 1490 | name: "Conv_124" 1491 | type: "Convolution" 1492 | bottom: "248" 1493 | top: "255" 1494 | convolution_param { 1495 | num_output: 128 1496 | bias_term: true 1497 | group: 1 1498 | pad_h: 0 1499 | pad_w: 0 1500 | kernel_h: 1 1501 | kernel_w: 1 1502 | stride_h: 1 1503 | stride_w: 1 1504 | dilation: 1 1505 | } 1506 | } 1507 | layer { 1508 | name: "Relu_125" 1509 | type: "ReLU" 1510 | bottom: "255" 1511 | top: "256" 1512 | } 1513 | layer { 1514 | name: "Concat_126" 1515 | type: "Concat" 1516 | bottom: "254" 1517 | bottom: "256" 1518 | top: "257" 1519 | concat_param { 1520 | axis: 1 1521 | } 1522 | } 1523 | layer { 1524 | name: "Conv_127" 1525 | type: "Convolution" 1526 | bottom: "257" 1527 | top: "258" 1528 | convolution_param { 1529 | num_output: 256 1530 | bias_term: true 1531 | group: 1 1532 | pad_h: 0 1533 | pad_w: 0 1534 | kernel_h: 1 1535 | kernel_w: 1 1536 | stride_h: 1 1537 | stride_w: 1 1538 | dilation: 1 1539 | } 1540 | } 1541 | layer { 1542 | name: "Relu_128" 1543 | type: "ReLU" 1544 | bottom: "258" 1545 | top: "259" 1546 | } 1547 | layer { 1548 | name: "Conv_129" 1549 | type: "Convolution" 1550 | bottom: "259" 1551 | top: "260" 1552 | convolution_param { 1553 | num_output: 256 1554 | bias_term: true 1555 | group: 1 1556 | pad_h: 1 1557 | pad_w: 1 1558 | kernel_h: 3 1559 | 
kernel_w: 3 1560 | stride_h: 2 1561 | stride_w: 2 1562 | dilation: 1 1563 | } 1564 | } 1565 | layer { 1566 | name: "Relu_130" 1567 | type: "ReLU" 1568 | bottom: "260" 1569 | top: "261" 1570 | } 1571 | layer { 1572 | name: "Concat_131" 1573 | type: "Concat" 1574 | bottom: "261" 1575 | bottom: "217" 1576 | top: "262" 1577 | concat_param { 1578 | axis: 1 1579 | } 1580 | } 1581 | layer { 1582 | name: "Conv_132" 1583 | type: "Convolution" 1584 | bottom: "262" 1585 | top: "263" 1586 | convolution_param { 1587 | num_output: 256 1588 | bias_term: true 1589 | group: 1 1590 | pad_h: 0 1591 | pad_w: 0 1592 | kernel_h: 1 1593 | kernel_w: 1 1594 | stride_h: 1 1595 | stride_w: 1 1596 | dilation: 1 1597 | } 1598 | } 1599 | layer { 1600 | name: "Relu_133" 1601 | type: "ReLU" 1602 | bottom: "263" 1603 | top: "264" 1604 | } 1605 | layer { 1606 | name: "Conv_134" 1607 | type: "Convolution" 1608 | bottom: "264" 1609 | top: "265" 1610 | convolution_param { 1611 | num_output: 256 1612 | bias_term: true 1613 | group: 1 1614 | pad_h: 0 1615 | pad_w: 0 1616 | kernel_h: 1 1617 | kernel_w: 1 1618 | stride_h: 1 1619 | stride_w: 1 1620 | dilation: 1 1621 | } 1622 | } 1623 | layer { 1624 | name: "Relu_135" 1625 | type: "ReLU" 1626 | bottom: "265" 1627 | top: "266" 1628 | } 1629 | layer { 1630 | name: "Conv_136" 1631 | type: "Convolution" 1632 | bottom: "266" 1633 | top: "267" 1634 | convolution_param { 1635 | num_output: 256 1636 | bias_term: true 1637 | group: 1 1638 | pad_h: 1 1639 | pad_w: 1 1640 | kernel_h: 3 1641 | kernel_w: 3 1642 | stride_h: 1 1643 | stride_w: 1 1644 | dilation: 1 1645 | } 1646 | } 1647 | layer { 1648 | name: "Relu_137" 1649 | type: "ReLU" 1650 | bottom: "267" 1651 | top: "268" 1652 | } 1653 | layer { 1654 | name: "Conv_138" 1655 | type: "Convolution" 1656 | bottom: "262" 1657 | top: "269" 1658 | convolution_param { 1659 | num_output: 256 1660 | bias_term: true 1661 | group: 1 1662 | pad_h: 0 1663 | pad_w: 0 1664 | kernel_h: 1 1665 | kernel_w: 1 1666 | stride_h: 1 1667 | 
stride_w: 1 1668 | dilation: 1 1669 | } 1670 | } 1671 | layer { 1672 | name: "Relu_139" 1673 | type: "ReLU" 1674 | bottom: "269" 1675 | top: "270" 1676 | } 1677 | layer { 1678 | name: "Concat_140" 1679 | type: "Concat" 1680 | bottom: "268" 1681 | bottom: "270" 1682 | top: "271" 1683 | concat_param { 1684 | axis: 1 1685 | } 1686 | } 1687 | layer { 1688 | name: "Conv_141" 1689 | type: "Convolution" 1690 | bottom: "271" 1691 | top: "272" 1692 | convolution_param { 1693 | num_output: 512 1694 | bias_term: true 1695 | group: 1 1696 | pad_h: 0 1697 | pad_w: 0 1698 | kernel_h: 1 1699 | kernel_w: 1 1700 | stride_h: 1 1701 | stride_w: 1 1702 | dilation: 1 1703 | } 1704 | } 1705 | layer { 1706 | name: "Relu_142" 1707 | type: "ReLU" 1708 | bottom: "272" 1709 | top: "273" 1710 | } 1711 | layer { 1712 | name: "Conv_143" 1713 | type: "Convolution" 1714 | bottom: "245" 1715 | top: "274" 1716 | convolution_param { 1717 | num_output: 21 1718 | bias_term: true 1719 | group: 1 1720 | pad_h: 0 1721 | pad_w: 0 1722 | kernel_h: 1 1723 | kernel_w: 1 1724 | stride_h: 1 1725 | stride_w: 1 1726 | dilation: 1 1727 | } 1728 | } 1729 | layer { 1730 | name: "Reshape_157" 1731 | type: "Reshape" 1732 | bottom: "274" 1733 | top: "292" 1734 | reshape_param { 1735 | shape { 1736 | dim: 1 1737 | dim: 3 1738 | dim: 7 1739 | dim: 80 1740 | dim: 80 1741 | } 1742 | } 1743 | } 1744 | layer { 1745 | name: "Transpose_158" 1746 | type: "Permute" 1747 | bottom: "292" 1748 | top: "output" 1749 | permute_param { 1750 | order: 0 1751 | order: 1 1752 | order: 3 1753 | order: 4 1754 | order: 2 1755 | } 1756 | } 1757 | layer { 1758 | name: "Conv_159" 1759 | type: "Convolution" 1760 | bottom: "259" 1761 | top: "294" 1762 | convolution_param { 1763 | num_output: 21 1764 | bias_term: true 1765 | group: 1 1766 | pad_h: 0 1767 | pad_w: 0 1768 | kernel_h: 1 1769 | kernel_w: 1 1770 | stride_h: 1 1771 | stride_w: 1 1772 | dilation: 1 1773 | } 1774 | } 1775 | layer { 1776 | name: "Reshape_173" 1777 | type: "Reshape" 1778 | 
bottom: "294" 1779 | top: "312" 1780 | reshape_param { 1781 | shape { 1782 | dim: 1 1783 | dim: 3 1784 | dim: 7 1785 | dim: 40 1786 | dim: 40 1787 | } 1788 | } 1789 | } 1790 | layer { 1791 | name: "Transpose_174" 1792 | type: "Permute" 1793 | bottom: "312" 1794 | top: "313" 1795 | permute_param { 1796 | order: 0 1797 | order: 1 1798 | order: 3 1799 | order: 4 1800 | order: 2 1801 | } 1802 | } 1803 | layer { 1804 | name: "Conv_175" 1805 | type: "Convolution" 1806 | bottom: "273" 1807 | top: "314" 1808 | convolution_param { 1809 | num_output: 21 1810 | bias_term: true 1811 | group: 1 1812 | pad_h: 0 1813 | pad_w: 0 1814 | kernel_h: 1 1815 | kernel_w: 1 1816 | stride_h: 1 1817 | stride_w: 1 1818 | dilation: 1 1819 | } 1820 | } 1821 | layer { 1822 | name: "Reshape_189" 1823 | type: "Reshape" 1824 | bottom: "314" 1825 | top: "332" 1826 | reshape_param { 1827 | shape { 1828 | dim: 1 1829 | dim: 3 1830 | dim: 7 1831 | dim: 20 1832 | dim: 20 1833 | } 1834 | } 1835 | } 1836 | layer { 1837 | name: "Transpose_190" 1838 | type: "Permute" 1839 | bottom: "332" 1840 | top: "333" 1841 | permute_param { 1842 | order: 0 1843 | order: 1 1844 | order: 3 1845 | order: 4 1846 | order: 2 1847 | } 1848 | } 1849 | 1850 | -------------------------------------------------------------------------------- /caffe_model.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xiaqing10/Hisi_YoLoV5/9e33759f6adbe60082e7e320d260602d89b8ef48/caffe_model.zip -------------------------------------------------------------------------------- /yolov0825.prototxt: -------------------------------------------------------------------------------- 1 | layer { 2 | name: "images" 3 | type: "Input" 4 | top: "images" 5 | input_param { 6 | shape { 7 | dim: 1 8 | dim: 3 9 | dim: 640 10 | dim: 640 11 | } 12 | } 13 | } 14 | layer { 15 | name: "Conv_0" 16 | type: "Convolution" 17 | bottom: "images" 18 | top: "131" 19 | convolution_param { 20 | num_output: 32 
21 | bias_term: true 22 | group: 1 23 | pad_h: 1 24 | pad_w: 1 25 | kernel_h: 3 26 | kernel_w: 3 27 | stride_h: 2 28 | stride_w: 2 29 | dilation: 1 30 | } 31 | } 32 | layer { 33 | name: "Relu_1" 34 | type: "ReLU" 35 | bottom: "131" 36 | top: "132" 37 | } 38 | layer { 39 | name: "Conv_2" 40 | type: "Convolution" 41 | bottom: "132" 42 | top: "133" 43 | convolution_param { 44 | num_output: 64 45 | bias_term: true 46 | group: 1 47 | pad_h: 1 48 | pad_w: 1 49 | kernel_h: 3 50 | kernel_w: 3 51 | stride_h: 2 52 | stride_w: 2 53 | dilation: 1 54 | } 55 | } 56 | layer { 57 | name: "Relu_3" 58 | type: "ReLU" 59 | bottom: "133" 60 | top: "134" 61 | } 62 | layer { 63 | name: "Conv_4" 64 | type: "Convolution" 65 | bottom: "134" 66 | top: "135" 67 | convolution_param { 68 | num_output: 32 69 | bias_term: true 70 | group: 1 71 | pad_h: 0 72 | pad_w: 0 73 | kernel_h: 1 74 | kernel_w: 1 75 | stride_h: 1 76 | stride_w: 1 77 | dilation: 1 78 | } 79 | } 80 | layer { 81 | name: "Relu_5" 82 | type: "ReLU" 83 | bottom: "135" 84 | top: "136" 85 | } 86 | layer { 87 | name: "Conv_6" 88 | type: "Convolution" 89 | bottom: "136" 90 | top: "137" 91 | convolution_param { 92 | num_output: 32 93 | bias_term: true 94 | group: 1 95 | pad_h: 0 96 | pad_w: 0 97 | kernel_h: 1 98 | kernel_w: 1 99 | stride_h: 1 100 | stride_w: 1 101 | dilation: 1 102 | } 103 | } 104 | layer { 105 | name: "Relu_7" 106 | type: "ReLU" 107 | bottom: "137" 108 | top: "138" 109 | } 110 | layer { 111 | name: "Conv_8" 112 | type: "Convolution" 113 | bottom: "138" 114 | top: "139" 115 | convolution_param { 116 | num_output: 32 117 | bias_term: true 118 | group: 1 119 | pad_h: 1 120 | pad_w: 1 121 | kernel_h: 3 122 | kernel_w: 3 123 | stride_h: 1 124 | stride_w: 1 125 | dilation: 1 126 | } 127 | } 128 | layer { 129 | name: "Relu_9" 130 | type: "ReLU" 131 | bottom: "139" 132 | top: "140" 133 | } 134 | layer { 135 | name: "Add_10" 136 | type: "Eltwise" 137 | bottom: "136" 138 | bottom: "140" 139 | top: "141" 140 | eltwise_param { 
141 | operation: SUM 142 | } 143 | } 144 | layer { 145 | name: "Conv_11" 146 | type: "Convolution" 147 | bottom: "134" 148 | top: "142" 149 | convolution_param { 150 | num_output: 32 151 | bias_term: true 152 | group: 1 153 | pad_h: 0 154 | pad_w: 0 155 | kernel_h: 1 156 | kernel_w: 1 157 | stride_h: 1 158 | stride_w: 1 159 | dilation: 1 160 | } 161 | } 162 | layer { 163 | name: "Relu_12" 164 | type: "ReLU" 165 | bottom: "142" 166 | top: "143" 167 | } 168 | layer { 169 | name: "Concat_13" 170 | type: "Concat" 171 | bottom: "141" 172 | bottom: "143" 173 | top: "144" 174 | concat_param { 175 | axis: 1 176 | } 177 | } 178 | layer { 179 | name: "Conv_14" 180 | type: "Convolution" 181 | bottom: "144" 182 | top: "145" 183 | convolution_param { 184 | num_output: 64 185 | bias_term: true 186 | group: 1 187 | pad_h: 0 188 | pad_w: 0 189 | kernel_h: 1 190 | kernel_w: 1 191 | stride_h: 1 192 | stride_w: 1 193 | dilation: 1 194 | } 195 | } 196 | layer { 197 | name: "Relu_15" 198 | type: "ReLU" 199 | bottom: "145" 200 | top: "146" 201 | } 202 | layer { 203 | name: "Conv_16" 204 | type: "Convolution" 205 | bottom: "146" 206 | top: "147" 207 | convolution_param { 208 | num_output: 128 209 | bias_term: true 210 | group: 1 211 | pad_h: 1 212 | pad_w: 1 213 | kernel_h: 3 214 | kernel_w: 3 215 | stride_h: 2 216 | stride_w: 2 217 | dilation: 1 218 | } 219 | } 220 | layer { 221 | name: "Relu_17" 222 | type: "ReLU" 223 | bottom: "147" 224 | top: "148" 225 | } 226 | layer { 227 | name: "Conv_18" 228 | type: "Convolution" 229 | bottom: "148" 230 | top: "149" 231 | convolution_param { 232 | num_output: 64 233 | bias_term: true 234 | group: 1 235 | pad_h: 0 236 | pad_w: 0 237 | kernel_h: 1 238 | kernel_w: 1 239 | stride_h: 1 240 | stride_w: 1 241 | dilation: 1 242 | } 243 | } 244 | layer { 245 | name: "Relu_19" 246 | type: "ReLU" 247 | bottom: "149" 248 | top: "150" 249 | } 250 | layer { 251 | name: "Conv_20" 252 | type: "Convolution" 253 | bottom: "150" 254 | top: "151" 255 | 
convolution_param { 256 | num_output: 64 257 | bias_term: true 258 | group: 1 259 | pad_h: 0 260 | pad_w: 0 261 | kernel_h: 1 262 | kernel_w: 1 263 | stride_h: 1 264 | stride_w: 1 265 | dilation: 1 266 | } 267 | } 268 | layer { 269 | name: "Relu_21" 270 | type: "ReLU" 271 | bottom: "151" 272 | top: "152" 273 | } 274 | layer { 275 | name: "Conv_22" 276 | type: "Convolution" 277 | bottom: "152" 278 | top: "153" 279 | convolution_param { 280 | num_output: 64 281 | bias_term: true 282 | group: 1 283 | pad_h: 1 284 | pad_w: 1 285 | kernel_h: 3 286 | kernel_w: 3 287 | stride_h: 1 288 | stride_w: 1 289 | dilation: 1 290 | } 291 | } 292 | layer { 293 | name: "Relu_23" 294 | type: "ReLU" 295 | bottom: "153" 296 | top: "154" 297 | } 298 | layer { 299 | name: "Add_24" 300 | type: "Eltwise" 301 | bottom: "150" 302 | bottom: "154" 303 | top: "155" 304 | eltwise_param { 305 | operation: SUM 306 | } 307 | } 308 | layer { 309 | name: "Conv_25" 310 | type: "Convolution" 311 | bottom: "155" 312 | top: "156" 313 | convolution_param { 314 | num_output: 64 315 | bias_term: true 316 | group: 1 317 | pad_h: 0 318 | pad_w: 0 319 | kernel_h: 1 320 | kernel_w: 1 321 | stride_h: 1 322 | stride_w: 1 323 | dilation: 1 324 | } 325 | } 326 | layer { 327 | name: "Relu_26" 328 | type: "ReLU" 329 | bottom: "156" 330 | top: "157" 331 | } 332 | layer { 333 | name: "Conv_27" 334 | type: "Convolution" 335 | bottom: "157" 336 | top: "158" 337 | convolution_param { 338 | num_output: 64 339 | bias_term: true 340 | group: 1 341 | pad_h: 1 342 | pad_w: 1 343 | kernel_h: 3 344 | kernel_w: 3 345 | stride_h: 1 346 | stride_w: 1 347 | dilation: 1 348 | } 349 | } 350 | layer { 351 | name: "Relu_28" 352 | type: "ReLU" 353 | bottom: "158" 354 | top: "159" 355 | } 356 | layer { 357 | name: "Add_29" 358 | type: "Eltwise" 359 | bottom: "155" 360 | bottom: "159" 361 | top: "160" 362 | eltwise_param { 363 | operation: SUM 364 | } 365 | } 366 | layer { 367 | name: "Conv_30" 368 | type: "Convolution" 369 | bottom: "160" 
370 | top: "161" 371 | convolution_param { 372 | num_output: 64 373 | bias_term: true 374 | group: 1 375 | pad_h: 0 376 | pad_w: 0 377 | kernel_h: 1 378 | kernel_w: 1 379 | stride_h: 1 380 | stride_w: 1 381 | dilation: 1 382 | } 383 | } 384 | layer { 385 | name: "Relu_31" 386 | type: "ReLU" 387 | bottom: "161" 388 | top: "162" 389 | } 390 | layer { 391 | name: "Conv_32" 392 | type: "Convolution" 393 | bottom: "162" 394 | top: "163" 395 | convolution_param { 396 | num_output: 64 397 | bias_term: true 398 | group: 1 399 | pad_h: 1 400 | pad_w: 1 401 | kernel_h: 3 402 | kernel_w: 3 403 | stride_h: 1 404 | stride_w: 1 405 | dilation: 1 406 | } 407 | } 408 | layer { 409 | name: "Relu_33" 410 | type: "ReLU" 411 | bottom: "163" 412 | top: "164" 413 | } 414 | layer { 415 | name: "Add_34" 416 | type: "Eltwise" 417 | bottom: "160" 418 | bottom: "164" 419 | top: "165" 420 | eltwise_param { 421 | operation: SUM 422 | } 423 | } 424 | layer { 425 | name: "Conv_35" 426 | type: "Convolution" 427 | bottom: "148" 428 | top: "166" 429 | convolution_param { 430 | num_output: 64 431 | bias_term: true 432 | group: 1 433 | pad_h: 0 434 | pad_w: 0 435 | kernel_h: 1 436 | kernel_w: 1 437 | stride_h: 1 438 | stride_w: 1 439 | dilation: 1 440 | } 441 | } 442 | layer { 443 | name: "Relu_36" 444 | type: "ReLU" 445 | bottom: "166" 446 | top: "167" 447 | } 448 | layer { 449 | name: "Concat_37" 450 | type: "Concat" 451 | bottom: "165" 452 | bottom: "167" 453 | top: "168" 454 | concat_param { 455 | axis: 1 456 | } 457 | } 458 | layer { 459 | name: "Conv_38" 460 | type: "Convolution" 461 | bottom: "168" 462 | top: "169" 463 | convolution_param { 464 | num_output: 128 465 | bias_term: true 466 | group: 1 467 | pad_h: 0 468 | pad_w: 0 469 | kernel_h: 1 470 | kernel_w: 1 471 | stride_h: 1 472 | stride_w: 1 473 | dilation: 1 474 | } 475 | } 476 | layer { 477 | name: "Relu_39" 478 | type: "ReLU" 479 | bottom: "169" 480 | top: "170" 481 | } 482 | layer { 483 | name: "Conv_40" 484 | type: "Convolution" 
485 | bottom: "170" 486 | top: "171" 487 | convolution_param { 488 | num_output: 256 489 | bias_term: true 490 | group: 1 491 | pad_h: 1 492 | pad_w: 1 493 | kernel_h: 3 494 | kernel_w: 3 495 | stride_h: 2 496 | stride_w: 2 497 | dilation: 1 498 | } 499 | } 500 | layer { 501 | name: "Relu_41" 502 | type: "ReLU" 503 | bottom: "171" 504 | top: "172" 505 | } 506 | layer { 507 | name: "Conv_42" 508 | type: "Convolution" 509 | bottom: "172" 510 | top: "173" 511 | convolution_param { 512 | num_output: 128 513 | bias_term: true 514 | group: 1 515 | pad_h: 0 516 | pad_w: 0 517 | kernel_h: 1 518 | kernel_w: 1 519 | stride_h: 1 520 | stride_w: 1 521 | dilation: 1 522 | } 523 | } 524 | layer { 525 | name: "Relu_43" 526 | type: "ReLU" 527 | bottom: "173" 528 | top: "174" 529 | } 530 | layer { 531 | name: "Conv_44" 532 | type: "Convolution" 533 | bottom: "174" 534 | top: "175" 535 | convolution_param { 536 | num_output: 128 537 | bias_term: true 538 | group: 1 539 | pad_h: 0 540 | pad_w: 0 541 | kernel_h: 1 542 | kernel_w: 1 543 | stride_h: 1 544 | stride_w: 1 545 | dilation: 1 546 | } 547 | } 548 | layer { 549 | name: "Relu_45" 550 | type: "ReLU" 551 | bottom: "175" 552 | top: "176" 553 | } 554 | layer { 555 | name: "Conv_46" 556 | type: "Convolution" 557 | bottom: "176" 558 | top: "177" 559 | convolution_param { 560 | num_output: 128 561 | bias_term: true 562 | group: 1 563 | pad_h: 1 564 | pad_w: 1 565 | kernel_h: 3 566 | kernel_w: 3 567 | stride_h: 1 568 | stride_w: 1 569 | dilation: 1 570 | } 571 | } 572 | layer { 573 | name: "Relu_47" 574 | type: "ReLU" 575 | bottom: "177" 576 | top: "178" 577 | } 578 | layer { 579 | name: "Add_48" 580 | type: "Eltwise" 581 | bottom: "174" 582 | bottom: "178" 583 | top: "179" 584 | eltwise_param { 585 | operation: SUM 586 | } 587 | } 588 | layer { 589 | name: "Conv_49" 590 | type: "Convolution" 591 | bottom: "179" 592 | top: "180" 593 | convolution_param { 594 | num_output: 128 595 | bias_term: true 596 | group: 1 597 | pad_h: 0 598 | 
pad_w: 0 599 | kernel_h: 1 600 | kernel_w: 1 601 | stride_h: 1 602 | stride_w: 1 603 | dilation: 1 604 | } 605 | } 606 | layer { 607 | name: "Relu_50" 608 | type: "ReLU" 609 | bottom: "180" 610 | top: "181" 611 | } 612 | layer { 613 | name: "Conv_51" 614 | type: "Convolution" 615 | bottom: "181" 616 | top: "182" 617 | convolution_param { 618 | num_output: 128 619 | bias_term: true 620 | group: 1 621 | pad_h: 1 622 | pad_w: 1 623 | kernel_h: 3 624 | kernel_w: 3 625 | stride_h: 1 626 | stride_w: 1 627 | dilation: 1 628 | } 629 | } 630 | layer { 631 | name: "Relu_52" 632 | type: "ReLU" 633 | bottom: "182" 634 | top: "183" 635 | } 636 | layer { 637 | name: "Add_53" 638 | type: "Eltwise" 639 | bottom: "179" 640 | bottom: "183" 641 | top: "184" 642 | eltwise_param { 643 | operation: SUM 644 | } 645 | } 646 | layer { 647 | name: "Conv_54" 648 | type: "Convolution" 649 | bottom: "184" 650 | top: "185" 651 | convolution_param { 652 | num_output: 128 653 | bias_term: true 654 | group: 1 655 | pad_h: 0 656 | pad_w: 0 657 | kernel_h: 1 658 | kernel_w: 1 659 | stride_h: 1 660 | stride_w: 1 661 | dilation: 1 662 | } 663 | } 664 | layer { 665 | name: "Relu_55" 666 | type: "ReLU" 667 | bottom: "185" 668 | top: "186" 669 | } 670 | layer { 671 | name: "Conv_56" 672 | type: "Convolution" 673 | bottom: "186" 674 | top: "187" 675 | convolution_param { 676 | num_output: 128 677 | bias_term: true 678 | group: 1 679 | pad_h: 1 680 | pad_w: 1 681 | kernel_h: 3 682 | kernel_w: 3 683 | stride_h: 1 684 | stride_w: 1 685 | dilation: 1 686 | } 687 | } 688 | layer { 689 | name: "Relu_57" 690 | type: "ReLU" 691 | bottom: "187" 692 | top: "188" 693 | } 694 | layer { 695 | name: "Add_58" 696 | type: "Eltwise" 697 | bottom: "184" 698 | bottom: "188" 699 | top: "189" 700 | eltwise_param { 701 | operation: SUM 702 | } 703 | } 704 | layer { 705 | name: "Conv_59" 706 | type: "Convolution" 707 | bottom: "172" 708 | top: "190" 709 | convolution_param { 710 | num_output: 128 711 | bias_term: true 712 | 
group: 1 713 | pad_h: 0 714 | pad_w: 0 715 | kernel_h: 1 716 | kernel_w: 1 717 | stride_h: 1 718 | stride_w: 1 719 | dilation: 1 720 | } 721 | } 722 | layer { 723 | name: "Relu_60" 724 | type: "ReLU" 725 | bottom: "190" 726 | top: "191" 727 | } 728 | layer { 729 | name: "Concat_61" 730 | type: "Concat" 731 | bottom: "189" 732 | bottom: "191" 733 | top: "192" 734 | concat_param { 735 | axis: 1 736 | } 737 | } 738 | layer { 739 | name: "Conv_62" 740 | type: "Convolution" 741 | bottom: "192" 742 | top: "193" 743 | convolution_param { 744 | num_output: 256 745 | bias_term: true 746 | group: 1 747 | pad_h: 0 748 | pad_w: 0 749 | kernel_h: 1 750 | kernel_w: 1 751 | stride_h: 1 752 | stride_w: 1 753 | dilation: 1 754 | } 755 | } 756 | layer { 757 | name: "Relu_63" 758 | type: "ReLU" 759 | bottom: "193" 760 | top: "194" 761 | } 762 | layer { 763 | name: "Conv_64" 764 | type: "Convolution" 765 | bottom: "194" 766 | top: "195" 767 | convolution_param { 768 | num_output: 512 769 | bias_term: true 770 | group: 1 771 | pad_h: 1 772 | pad_w: 1 773 | kernel_h: 3 774 | kernel_w: 3 775 | stride_h: 2 776 | stride_w: 2 777 | dilation: 1 778 | } 779 | } 780 | layer { 781 | name: "Relu_65" 782 | type: "ReLU" 783 | bottom: "195" 784 | top: "196" 785 | } 786 | layer { 787 | name: "Conv_66" 788 | type: "Convolution" 789 | bottom: "196" 790 | top: "197" 791 | convolution_param { 792 | num_output: 256 793 | bias_term: true 794 | group: 1 795 | pad_h: 0 796 | pad_w: 0 797 | kernel_h: 1 798 | kernel_w: 1 799 | stride_h: 1 800 | stride_w: 1 801 | dilation: 1 802 | } 803 | } 804 | layer { 805 | name: "Relu_67" 806 | type: "ReLU" 807 | bottom: "197" 808 | top: "198" 809 | } 810 | layer { 811 | name: "MaxPool_68" 812 | type: "Pooling" 813 | bottom: "198" 814 | top: "199" 815 | pooling_param { 816 | pool: MAX 817 | kernel_h: 5 818 | kernel_w: 5 819 | stride_h: 1 820 | stride_w: 1 821 | pad_h: 2 822 | pad_w: 2 823 | } 824 | } 825 | layer { 826 | name: "MaxPool_69" 827 | type: "Pooling" 828 | 
bottom: "198" 829 | top: "200" 830 | pooling_param { 831 | pool: MAX 832 | kernel_h: 9 833 | kernel_w: 9 834 | stride_h: 1 835 | stride_w: 1 836 | pad_h: 4 837 | pad_w: 4 838 | } 839 | } 840 | layer { 841 | name: "MaxPool_70" 842 | type: "Pooling" 843 | bottom: "198" 844 | top: "201" 845 | pooling_param { 846 | pool: MAX 847 | kernel_h: 13 848 | kernel_w: 13 849 | stride_h: 1 850 | stride_w: 1 851 | pad_h: 6 852 | pad_w: 6 853 | } 854 | } 855 | layer { 856 | name: "Concat_71" 857 | type: "Concat" 858 | bottom: "198" 859 | bottom: "199" 860 | bottom: "200" 861 | bottom: "201" 862 | top: "202" 863 | concat_param { 864 | axis: 1 865 | } 866 | } 867 | layer { 868 | name: "Conv_72" 869 | type: "Convolution" 870 | bottom: "202" 871 | top: "203" 872 | convolution_param { 873 | num_output: 512 874 | bias_term: true 875 | group: 1 876 | pad_h: 0 877 | pad_w: 0 878 | kernel_h: 1 879 | kernel_w: 1 880 | stride_h: 1 881 | stride_w: 1 882 | dilation: 1 883 | } 884 | } 885 | layer { 886 | name: "Relu_73" 887 | type: "ReLU" 888 | bottom: "203" 889 | top: "204" 890 | } 891 | layer { 892 | name: "Conv_74" 893 | type: "Convolution" 894 | bottom: "204" 895 | top: "205" 896 | convolution_param { 897 | num_output: 256 898 | bias_term: true 899 | group: 1 900 | pad_h: 0 901 | pad_w: 0 902 | kernel_h: 1 903 | kernel_w: 1 904 | stride_h: 1 905 | stride_w: 1 906 | dilation: 1 907 | } 908 | } 909 | layer { 910 | name: "Relu_75" 911 | type: "ReLU" 912 | bottom: "205" 913 | top: "206" 914 | } 915 | layer { 916 | name: "Conv_76" 917 | type: "Convolution" 918 | bottom: "206" 919 | top: "207" 920 | convolution_param { 921 | num_output: 256 922 | bias_term: true 923 | group: 1 924 | pad_h: 0 925 | pad_w: 0 926 | kernel_h: 1 927 | kernel_w: 1 928 | stride_h: 1 929 | stride_w: 1 930 | dilation: 1 931 | } 932 | } 933 | layer { 934 | name: "Relu_77" 935 | type: "ReLU" 936 | bottom: "207" 937 | top: "208" 938 | } 939 | layer { 940 | name: "Conv_78" 941 | type: "Convolution" 942 | bottom: "208" 943 | 
top: "209" 944 | convolution_param { 945 | num_output: 256 946 | bias_term: true 947 | group: 1 948 | pad_h: 1 949 | pad_w: 1 950 | kernel_h: 3 951 | kernel_w: 3 952 | stride_h: 1 953 | stride_w: 1 954 | dilation: 1 955 | } 956 | } 957 | layer { 958 | name: "Relu_79" 959 | type: "ReLU" 960 | bottom: "209" 961 | top: "210" 962 | } 963 | layer { 964 | name: "Conv_80" 965 | type: "Convolution" 966 | bottom: "204" 967 | top: "211" 968 | convolution_param { 969 | num_output: 256 970 | bias_term: true 971 | group: 1 972 | pad_h: 0 973 | pad_w: 0 974 | kernel_h: 1 975 | kernel_w: 1 976 | stride_h: 1 977 | stride_w: 1 978 | dilation: 1 979 | } 980 | } 981 | layer { 982 | name: "Relu_81" 983 | type: "ReLU" 984 | bottom: "211" 985 | top: "212" 986 | } 987 | layer { 988 | name: "Concat_82" 989 | type: "Concat" 990 | bottom: "210" 991 | bottom: "212" 992 | top: "213" 993 | concat_param { 994 | axis: 1 995 | } 996 | } 997 | layer { 998 | name: "Conv_83" 999 | type: "Convolution" 1000 | bottom: "213" 1001 | top: "214" 1002 | convolution_param { 1003 | num_output: 512 1004 | bias_term: true 1005 | group: 1 1006 | pad_h: 0 1007 | pad_w: 0 1008 | kernel_h: 1 1009 | kernel_w: 1 1010 | stride_h: 1 1011 | stride_w: 1 1012 | dilation: 1 1013 | } 1014 | } 1015 | layer { 1016 | name: "Relu_84" 1017 | type: "ReLU" 1018 | bottom: "214" 1019 | top: "215" 1020 | } 1021 | layer { 1022 | name: "Conv_85" 1023 | type: "Convolution" 1024 | bottom: "215" 1025 | top: "216" 1026 | convolution_param { 1027 | num_output: 256 1028 | bias_term: true 1029 | group: 1 1030 | pad_h: 0 1031 | pad_w: 0 1032 | kernel_h: 1 1033 | kernel_w: 1 1034 | stride_h: 1 1035 | stride_w: 1 1036 | dilation: 1 1037 | } 1038 | } 1039 | layer { 1040 | name: "Relu_86" 1041 | type: "ReLU" 1042 | bottom: "216" 1043 | top: "217" 1044 | } 1045 | layer { 1046 | name: "ConvTranspose_87" 1047 | type: "Deconvolution" 1048 | bottom: "217" 1049 | top: "218" 1050 | convolution_param { 1051 | num_output: 256 1052 | bias_term: true 1053 | 
group: 1 1054 | pad_h: 0 1055 | pad_w: 0 1056 | kernel_h: 2 1057 | kernel_w: 2 1058 | stride_h: 2 1059 | stride_w: 2 1060 | } 1061 | } 1062 | layer { 1063 | name: "Concat_88" 1064 | type: "Concat" 1065 | bottom: "218" 1066 | bottom: "194" 1067 | top: "219" 1068 | concat_param { 1069 | axis: 1 1070 | } 1071 | } 1072 | layer { 1073 | name: "Conv_89" 1074 | type: "Convolution" 1075 | bottom: "219" 1076 | top: "220" 1077 | convolution_param { 1078 | num_output: 128 1079 | bias_term: true 1080 | group: 1 1081 | pad_h: 0 1082 | pad_w: 0 1083 | kernel_h: 1 1084 | kernel_w: 1 1085 | stride_h: 1 1086 | stride_w: 1 1087 | dilation: 1 1088 | } 1089 | } 1090 | layer { 1091 | name: "Relu_90" 1092 | type: "ReLU" 1093 | bottom: "220" 1094 | top: "221" 1095 | } 1096 | layer { 1097 | name: "Conv_91" 1098 | type: "Convolution" 1099 | bottom: "221" 1100 | top: "222" 1101 | convolution_param { 1102 | num_output: 128 1103 | bias_term: true 1104 | group: 1 1105 | pad_h: 0 1106 | pad_w: 0 1107 | kernel_h: 1 1108 | kernel_w: 1 1109 | stride_h: 1 1110 | stride_w: 1 1111 | dilation: 1 1112 | } 1113 | } 1114 | layer { 1115 | name: "Relu_92" 1116 | type: "ReLU" 1117 | bottom: "222" 1118 | top: "223" 1119 | } 1120 | layer { 1121 | name: "Conv_93" 1122 | type: "Convolution" 1123 | bottom: "223" 1124 | top: "224" 1125 | convolution_param { 1126 | num_output: 128 1127 | bias_term: true 1128 | group: 1 1129 | pad_h: 1 1130 | pad_w: 1 1131 | kernel_h: 3 1132 | kernel_w: 3 1133 | stride_h: 1 1134 | stride_w: 1 1135 | dilation: 1 1136 | } 1137 | } 1138 | layer { 1139 | name: "Relu_94" 1140 | type: "ReLU" 1141 | bottom: "224" 1142 | top: "225" 1143 | } 1144 | layer { 1145 | name: "Conv_95" 1146 | type: "Convolution" 1147 | bottom: "219" 1148 | top: "226" 1149 | convolution_param { 1150 | num_output: 128 1151 | bias_term: true 1152 | group: 1 1153 | pad_h: 0 1154 | pad_w: 0 1155 | kernel_h: 1 1156 | kernel_w: 1 1157 | stride_h: 1 1158 | stride_w: 1 1159 | dilation: 1 1160 | } 1161 | } 1162 | layer { 
1163 | name: "Relu_96" 1164 | type: "ReLU" 1165 | bottom: "226" 1166 | top: "227" 1167 | } 1168 | layer { 1169 | name: "Concat_97" 1170 | type: "Concat" 1171 | bottom: "225" 1172 | bottom: "227" 1173 | top: "228" 1174 | concat_param { 1175 | axis: 1 1176 | } 1177 | } 1178 | layer { 1179 | name: "Conv_98" 1180 | type: "Convolution" 1181 | bottom: "228" 1182 | top: "229" 1183 | convolution_param { 1184 | num_output: 256 1185 | bias_term: true 1186 | group: 1 1187 | pad_h: 0 1188 | pad_w: 0 1189 | kernel_h: 1 1190 | kernel_w: 1 1191 | stride_h: 1 1192 | stride_w: 1 1193 | dilation: 1 1194 | } 1195 | } 1196 | layer { 1197 | name: "Relu_99" 1198 | type: "ReLU" 1199 | bottom: "229" 1200 | top: "230" 1201 | } 1202 | layer { 1203 | name: "Conv_100" 1204 | type: "Convolution" 1205 | bottom: "230" 1206 | top: "231" 1207 | convolution_param { 1208 | num_output: 128 1209 | bias_term: true 1210 | group: 1 1211 | pad_h: 0 1212 | pad_w: 0 1213 | kernel_h: 1 1214 | kernel_w: 1 1215 | stride_h: 1 1216 | stride_w: 1 1217 | dilation: 1 1218 | } 1219 | } 1220 | layer { 1221 | name: "Relu_101" 1222 | type: "ReLU" 1223 | bottom: "231" 1224 | top: "232" 1225 | } 1226 | layer { 1227 | name: "ConvTranspose_102" 1228 | type: "Deconvolution" 1229 | bottom: "232" 1230 | top: "233" 1231 | convolution_param { 1232 | num_output: 128 1233 | bias_term: true 1234 | group: 1 1235 | pad_h: 0 1236 | pad_w: 0 1237 | kernel_h: 2 1238 | kernel_w: 2 1239 | stride_h: 2 1240 | stride_w: 2 1241 | } 1242 | } 1243 | layer { 1244 | name: "Concat_103" 1245 | type: "Concat" 1246 | bottom: "233" 1247 | bottom: "170" 1248 | top: "234" 1249 | concat_param { 1250 | axis: 1 1251 | } 1252 | } 1253 | layer { 1254 | name: "Conv_104" 1255 | type: "Convolution" 1256 | bottom: "234" 1257 | top: "235" 1258 | convolution_param { 1259 | num_output: 64 1260 | bias_term: true 1261 | group: 1 1262 | pad_h: 0 1263 | pad_w: 0 1264 | kernel_h: 1 1265 | kernel_w: 1 1266 | stride_h: 1 1267 | stride_w: 1 1268 | dilation: 1 1269 | } 
1270 | } 1271 | layer { 1272 | name: "Relu_105" 1273 | type: "ReLU" 1274 | bottom: "235" 1275 | top: "236" 1276 | } 1277 | layer { 1278 | name: "Conv_106" 1279 | type: "Convolution" 1280 | bottom: "236" 1281 | top: "237" 1282 | convolution_param { 1283 | num_output: 64 1284 | bias_term: true 1285 | group: 1 1286 | pad_h: 0 1287 | pad_w: 0 1288 | kernel_h: 1 1289 | kernel_w: 1 1290 | stride_h: 1 1291 | stride_w: 1 1292 | dilation: 1 1293 | } 1294 | } 1295 | layer { 1296 | name: "Relu_107" 1297 | type: "ReLU" 1298 | bottom: "237" 1299 | top: "238" 1300 | } 1301 | layer { 1302 | name: "Conv_108" 1303 | type: "Convolution" 1304 | bottom: "238" 1305 | top: "239" 1306 | convolution_param { 1307 | num_output: 64 1308 | bias_term: true 1309 | group: 1 1310 | pad_h: 1 1311 | pad_w: 1 1312 | kernel_h: 3 1313 | kernel_w: 3 1314 | stride_h: 1 1315 | stride_w: 1 1316 | dilation: 1 1317 | } 1318 | } 1319 | layer { 1320 | name: "Relu_109" 1321 | type: "ReLU" 1322 | bottom: "239" 1323 | top: "240" 1324 | } 1325 | layer { 1326 | name: "Conv_110" 1327 | type: "Convolution" 1328 | bottom: "234" 1329 | top: "241" 1330 | convolution_param { 1331 | num_output: 64 1332 | bias_term: true 1333 | group: 1 1334 | pad_h: 0 1335 | pad_w: 0 1336 | kernel_h: 1 1337 | kernel_w: 1 1338 | stride_h: 1 1339 | stride_w: 1 1340 | dilation: 1 1341 | } 1342 | } 1343 | layer { 1344 | name: "Relu_111" 1345 | type: "ReLU" 1346 | bottom: "241" 1347 | top: "242" 1348 | } 1349 | layer { 1350 | name: "Concat_112" 1351 | type: "Concat" 1352 | bottom: "240" 1353 | bottom: "242" 1354 | top: "243" 1355 | concat_param { 1356 | axis: 1 1357 | } 1358 | } 1359 | layer { 1360 | name: "Conv_113" 1361 | type: "Convolution" 1362 | bottom: "243" 1363 | top: "244" 1364 | convolution_param { 1365 | num_output: 128 1366 | bias_term: true 1367 | group: 1 1368 | pad_h: 0 1369 | pad_w: 0 1370 | kernel_h: 1 1371 | kernel_w: 1 1372 | stride_h: 1 1373 | stride_w: 1 1374 | dilation: 1 1375 | } 1376 | } 1377 | layer { 1378 | name: 
"Relu_114" 1379 | type: "ReLU" 1380 | bottom: "244" 1381 | top: "245" 1382 | } 1383 | layer { 1384 | name: "Conv_115" 1385 | type: "Convolution" 1386 | bottom: "245" 1387 | top: "246" 1388 | convolution_param { 1389 | num_output: 128 1390 | bias_term: true 1391 | group: 1 1392 | pad_h: 1 1393 | pad_w: 1 1394 | kernel_h: 3 1395 | kernel_w: 3 1396 | stride_h: 2 1397 | stride_w: 2 1398 | dilation: 1 1399 | } 1400 | } 1401 | layer { 1402 | name: "Relu_116" 1403 | type: "ReLU" 1404 | bottom: "246" 1405 | top: "247" 1406 | } 1407 | layer { 1408 | name: "Concat_117" 1409 | type: "Concat" 1410 | bottom: "247" 1411 | bottom: "232" 1412 | top: "248" 1413 | concat_param { 1414 | axis: 1 1415 | } 1416 | } 1417 | layer { 1418 | name: "Conv_118" 1419 | type: "Convolution" 1420 | bottom: "248" 1421 | top: "249" 1422 | convolution_param { 1423 | num_output: 128 1424 | bias_term: true 1425 | group: 1 1426 | pad_h: 0 1427 | pad_w: 0 1428 | kernel_h: 1 1429 | kernel_w: 1 1430 | stride_h: 1 1431 | stride_w: 1 1432 | dilation: 1 1433 | } 1434 | } 1435 | layer { 1436 | name: "Relu_119" 1437 | type: "ReLU" 1438 | bottom: "249" 1439 | top: "250" 1440 | } 1441 | layer { 1442 | name: "Conv_120" 1443 | type: "Convolution" 1444 | bottom: "250" 1445 | top: "251" 1446 | convolution_param { 1447 | num_output: 128 1448 | bias_term: true 1449 | group: 1 1450 | pad_h: 0 1451 | pad_w: 0 1452 | kernel_h: 1 1453 | kernel_w: 1 1454 | stride_h: 1 1455 | stride_w: 1 1456 | dilation: 1 1457 | } 1458 | } 1459 | layer { 1460 | name: "Relu_121" 1461 | type: "ReLU" 1462 | bottom: "251" 1463 | top: "252" 1464 | } 1465 | layer { 1466 | name: "Conv_122" 1467 | type: "Convolution" 1468 | bottom: "252" 1469 | top: "253" 1470 | convolution_param { 1471 | num_output: 128 1472 | bias_term: true 1473 | group: 1 1474 | pad_h: 1 1475 | pad_w: 1 1476 | kernel_h: 3 1477 | kernel_w: 3 1478 | stride_h: 1 1479 | stride_w: 1 1480 | dilation: 1 1481 | } 1482 | } 1483 | layer { 1484 | name: "Relu_123" 1485 | type: "ReLU" 1486 | 
bottom: "253" 1487 | top: "254" 1488 | } 1489 | layer { 1490 | name: "Conv_124" 1491 | type: "Convolution" 1492 | bottom: "248" 1493 | top: "255" 1494 | convolution_param { 1495 | num_output: 128 1496 | bias_term: true 1497 | group: 1 1498 | pad_h: 0 1499 | pad_w: 0 1500 | kernel_h: 1 1501 | kernel_w: 1 1502 | stride_h: 1 1503 | stride_w: 1 1504 | dilation: 1 1505 | } 1506 | } 1507 | layer { 1508 | name: "Relu_125" 1509 | type: "ReLU" 1510 | bottom: "255" 1511 | top: "256" 1512 | } 1513 | layer { 1514 | name: "Concat_126" 1515 | type: "Concat" 1516 | bottom: "254" 1517 | bottom: "256" 1518 | top: "257" 1519 | concat_param { 1520 | axis: 1 1521 | } 1522 | } 1523 | layer { 1524 | name: "Conv_127" 1525 | type: "Convolution" 1526 | bottom: "257" 1527 | top: "258" 1528 | convolution_param { 1529 | num_output: 256 1530 | bias_term: true 1531 | group: 1 1532 | pad_h: 0 1533 | pad_w: 0 1534 | kernel_h: 1 1535 | kernel_w: 1 1536 | stride_h: 1 1537 | stride_w: 1 1538 | dilation: 1 1539 | } 1540 | } 1541 | layer { 1542 | name: "Relu_128" 1543 | type: "ReLU" 1544 | bottom: "258" 1545 | top: "259" 1546 | } 1547 | layer { 1548 | name: "Conv_129" 1549 | type: "Convolution" 1550 | bottom: "259" 1551 | top: "260" 1552 | convolution_param { 1553 | num_output: 256 1554 | bias_term: true 1555 | group: 1 1556 | pad_h: 1 1557 | pad_w: 1 1558 | kernel_h: 3 1559 | kernel_w: 3 1560 | stride_h: 2 1561 | stride_w: 2 1562 | dilation: 1 1563 | } 1564 | } 1565 | layer { 1566 | name: "Relu_130" 1567 | type: "ReLU" 1568 | bottom: "260" 1569 | top: "261" 1570 | } 1571 | layer { 1572 | name: "Concat_131" 1573 | type: "Concat" 1574 | bottom: "261" 1575 | bottom: "217" 1576 | top: "262" 1577 | concat_param { 1578 | axis: 1 1579 | } 1580 | } 1581 | layer { 1582 | name: "Conv_132" 1583 | type: "Convolution" 1584 | bottom: "262" 1585 | top: "263" 1586 | convolution_param { 1587 | num_output: 256 1588 | bias_term: true 1589 | group: 1 1590 | pad_h: 0 1591 | pad_w: 0 1592 | kernel_h: 1 1593 | kernel_w: 1 
1594 | stride_h: 1 1595 | stride_w: 1 1596 | dilation: 1 1597 | } 1598 | } 1599 | layer { 1600 | name: "Relu_133" 1601 | type: "ReLU" 1602 | bottom: "263" 1603 | top: "264" 1604 | } 1605 | layer { 1606 | name: "Conv_134" 1607 | type: "Convolution" 1608 | bottom: "264" 1609 | top: "265" 1610 | convolution_param { 1611 | num_output: 256 1612 | bias_term: true 1613 | group: 1 1614 | pad_h: 0 1615 | pad_w: 0 1616 | kernel_h: 1 1617 | kernel_w: 1 1618 | stride_h: 1 1619 | stride_w: 1 1620 | dilation: 1 1621 | } 1622 | } 1623 | layer { 1624 | name: "Relu_135" 1625 | type: "ReLU" 1626 | bottom: "265" 1627 | top: "266" 1628 | } 1629 | layer { 1630 | name: "Conv_136" 1631 | type: "Convolution" 1632 | bottom: "266" 1633 | top: "267" 1634 | convolution_param { 1635 | num_output: 256 1636 | bias_term: true 1637 | group: 1 1638 | pad_h: 1 1639 | pad_w: 1 1640 | kernel_h: 3 1641 | kernel_w: 3 1642 | stride_h: 1 1643 | stride_w: 1 1644 | dilation: 1 1645 | } 1646 | } 1647 | layer { 1648 | name: "Relu_137" 1649 | type: "ReLU" 1650 | bottom: "267" 1651 | top: "268" 1652 | } 1653 | layer { 1654 | name: "Conv_138" 1655 | type: "Convolution" 1656 | bottom: "262" 1657 | top: "269" 1658 | convolution_param { 1659 | num_output: 256 1660 | bias_term: true 1661 | group: 1 1662 | pad_h: 0 1663 | pad_w: 0 1664 | kernel_h: 1 1665 | kernel_w: 1 1666 | stride_h: 1 1667 | stride_w: 1 1668 | dilation: 1 1669 | } 1670 | } 1671 | layer { 1672 | name: "Relu_139" 1673 | type: "ReLU" 1674 | bottom: "269" 1675 | top: "270" 1676 | } 1677 | layer { 1678 | name: "Concat_140" 1679 | type: "Concat" 1680 | bottom: "268" 1681 | bottom: "270" 1682 | top: "271" 1683 | concat_param { 1684 | axis: 1 1685 | } 1686 | } 1687 | layer { 1688 | name: "Conv_141" 1689 | type: "Convolution" 1690 | bottom: "271" 1691 | top: "272" 1692 | convolution_param { 1693 | num_output: 512 1694 | bias_term: true 1695 | group: 1 1696 | pad_h: 0 1697 | pad_w: 0 1698 | kernel_h: 1 1699 | kernel_w: 1 1700 | stride_h: 1 1701 | stride_w: 1 
1702 | dilation: 1 1703 | } 1704 | } 1705 | layer { 1706 | name: "Relu_142" 1707 | type: "ReLU" 1708 | bottom: "272" 1709 | top: "273" 1710 | } 1711 | layer { 1712 | name: "Conv_143" 1713 | type: "Convolution" 1714 | bottom: "245" 1715 | top: "274" 1716 | convolution_param { 1717 | num_output: 21 1718 | bias_term: true 1719 | group: 1 1720 | pad_h: 0 1721 | pad_w: 0 1722 | kernel_h: 1 1723 | kernel_w: 1 1724 | stride_h: 1 1725 | stride_w: 1 1726 | dilation: 1 1727 | } 1728 | } 1729 | layer { 1730 | name: "Reshape_157" 1731 | type: "Reshape" 1732 | bottom: "274" 1733 | top: "292" 1734 | reshape_param { 1735 | shape { 1736 | dim: 0 1737 | dim: 3 1738 | dim: 7 1739 | dim: 6400 1740 | } 1741 | } 1742 | } 1743 | layer { 1744 | name: "Conv_159" 1745 | type: "Convolution" 1746 | bottom: "259" 1747 | top: "294" 1748 | convolution_param { 1749 | num_output: 21 1750 | bias_term: true 1751 | group: 1 1752 | pad_h: 0 1753 | pad_w: 0 1754 | kernel_h: 1 1755 | kernel_w: 1 1756 | stride_h: 1 1757 | stride_w: 1 1758 | dilation: 1 1759 | } 1760 | } 1761 | layer { 1762 | name: "Reshape_173" 1763 | type: "Reshape" 1764 | bottom: "294" 1765 | top: "312" 1766 | reshape_param { 1767 | shape { 1768 | dim: 0 1769 | dim: 3 1770 | dim: 7 1771 | dim: 1600 1772 | } 1773 | } 1774 | } 1775 | 1776 | layer { 1777 | name: "Conv_175" 1778 | type: "Convolution" 1779 | bottom: "273" 1780 | top: "314" 1781 | convolution_param { 1782 | num_output: 21 1783 | bias_term: true 1784 | group: 1 1785 | pad_h: 0 1786 | pad_w: 0 1787 | kernel_h: 1 1788 | kernel_w: 1 1789 | stride_h: 1 1790 | stride_w: 1 1791 | dilation: 1 1792 | } 1793 | } 1794 | layer { 1795 | name: "Reshape_189" 1796 | type: "Reshape" 1797 | bottom: "314" 1798 | top: "332" 1799 | reshape_param { 1800 | shape { 1801 | dim: 0 1802 | dim: 3 1803 | dim: 7 1804 | dim: 400 1805 | } 1806 | } 1807 | } 1808 | 1809 | 1810 | --------------------------------------------------------------------------------