from keras.layers import Conv2D, GlobalAveragePooling2D, Dense, BatchNormalization, Activation, Lambda, Concatenate, Add, Multiply


def res2net_bottleneck_block(x, f, s=4, expansion=4, use_se_block=False):
    """Pre-activation Res2Net bottleneck block.

    Structure: 1x1 conv -> hierarchical 3x3 convs over `s` channel splits
    -> 1x1 expansion conv, plus an identity (or 1x1-projected) residual
    connection.

    Arguments:
        x: input 4D tensor (channels-last assumed — TODO confirm backend).
        f: number of channels produced by the first 1x1 conv; this is the
            width that gets split into `s` groups, so it must be a
            multiple of `s`.
        s: scale dimension (number of channel groups in the 3x3 stage).
        expansion: channel multiplier of the final 1x1 conv.
        use_se_block: if True, apply squeeze-and-excitation before the
            residual addition.

    Returns:
        Output tensor with f * expansion channels.
    """
    # NOTE(review): `_keras_shape` is a private attribute of old Keras;
    # on newer Keras use `backend.int_shape(x)[-1]` instead — confirm the
    # Keras version this project pins before changing.
    num_channels = int(x._keras_shape[-1])

    # BUGFIX: the 3x3 stage slices the f-channel tensor produced by the
    # first 1x1 conv, NOT the input tensor, so it is `f` that must be
    # divisible by `s`. The original asserted `num_channels % s == 0`,
    # which both rejected valid inputs and let f % s != 0 silently drop
    # the last f - s*(f//s) channels in the slicing below.
    assert f % s == 0, f"f should be a multiple of s. Received f={f} and s={s}."

    input_tensor = x

    # Conv 1x1 (pre-activation ordering: BN -> ReLU -> Conv)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(f, 1, kernel_initializer='he_normal', use_bias=False)(x)

    # Hierarchical Conv 3x3 over s channel groups of width w.
    # Group 0 is passed through unchanged; each later group is convolved,
    # and from group 2 onward the previous group's output is added first,
    # which is what builds the multi-scale receptive field.
    subset_x = []
    w = f // s
    for i in range(s):
        # BUGFIX: bind i and w as default arguments. A plain closure
        # captures the loop variables late; that works at graph-build
        # time (Keras calls the function immediately) but breaks
        # serialization/cloning of the Lambda layers. Also avoids
        # shadowing the outer `x`.
        slice_x = Lambda(lambda t, i=i, w=w: t[..., i * w:(i + 1) * w])(x)
        if i > 1:
            slice_x = Add()([slice_x, subset_x[-1]])
        if i > 0:
            slice_x = BatchNormalization()(slice_x)
            slice_x = Activation('relu')(slice_x)
            slice_x = Conv2D(w, 3, kernel_initializer='he_normal', padding='same', use_bias=False)(slice_x)
        subset_x.append(slice_x)
    x = Concatenate()(subset_x)

    # Conv 1x1 expansion
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(f * expansion, 1, kernel_initializer='he_normal', use_bias=False)(x)

    if use_se_block:
        x = se_block(x)

    # Residual connection: identity when the channel counts already
    # match, otherwise a 1x1 projection of the input.
    if num_channels == f * expansion:
        skip = input_tensor
    else:
        skip = Conv2D(f * expansion, 1, kernel_initializer='he_normal')(input_tensor)
    out = Add()([x, skip])
    return out


def se_block(input_tensor, c=16):
    """Squeeze-and-Excitation channel re-weighting.

    Arguments:
        input_tensor: 4D tensor (TensorFlow backend, channels-last).
        c: reduction ratio of the bottleneck Dense layer.

    Returns:
        Tensor of the same shape, rescaled channel-wise by learned gates.
    """
    num_channels = int(input_tensor._keras_shape[-1])  # TensorFlow backend (channels-last)
    bottleneck = num_channels // c

    # Squeeze: global spatial average pooling -> (batch, channels)
    se_branch = GlobalAveragePooling2D()(input_tensor)
    # Excite: bottleneck MLP ending in per-channel sigmoid gates
    se_branch = Dense(bottleneck, use_bias=False, activation='relu')(se_branch)
    se_branch = Dense(num_channels, use_bias=False, activation='sigmoid')(se_branch)

    # Scale: broadcast-multiply the gates over the spatial dimensions
    out = Multiply()([input_tensor, se_branch])
    return out