# ResNet-101 Faster R-CNN train prototxt (gist by @hgaiser, created May 17, 2016)
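# Train-time network definition in the style of py-faster-rcnn: a ResNet-101 backbone
# truncated after conv4 (res4b22), with an RPN and an RoI detection head on top. The
# Python data layer below supplies the image, its im_info, and ground-truth boxes;
# num_classes is set to 40 for this dataset.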
name: "ResNet-101"
layer {
name: 'input-data'
type: 'Python'
top: 'data'
top: 'im_info'
top: 'gt_boxes'
python_param {
module: 'roi_data_layer.layer'
layer: 'RoIDataLayer'
param_str: "'num_classes': 40"
}
}
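# The Python RoIDataLayer (roi_data_layer from py-faster-rcnn) feeds one scaled image per
# iteration: 'data' is the image blob, 'im_info' its height/width/scale, and 'gt_boxes'
# the ground-truth boxes. 'num_classes': 40 must match the dataset (in py-faster-rcnn this
# count includes the background class).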
layer {
bottom: "data"
top: "conv1"
name: "conv1"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 7
pad: 3
stride: 2
bias_term: false
}
}
layer {
bottom: "conv1"
top: "conv1"
name: "bn_conv1"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv1"
top: "conv1"
name: "scale_conv1"
type: "Scale"
scale_param {
bias_term: true
}
}
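# Throughout this net every convolution is followed by a BatchNorm layer with
# use_global_stats: true (the stored mean/variance are used and not updated) and a Scale
# layer with bias_term: true, which supplies the learned affine gamma/beta that Caffe's
# BatchNorm layer itself does not provide.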
layer {
top: "conv1"
bottom: "conv1"
name: "conv1_relu"
type: "ReLU"
}
layer {
bottom: "conv1"
top: "pool1"
name: "pool1"
type: "Pooling"
pooling_param {
kernel_size: 3
stride: 2
pool: MAX
}
}
layer {
bottom: "pool1"
top: "res2a_branch1"
name: "res2a_branch1"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res2a_branch1"
top: "res2a_branch1"
name: "bn2a_branch1"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res2a_branch1"
top: "res2a_branch1"
name: "scale2a_branch1"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "pool1"
top: "res2a_branch2a"
name: "res2a_branch2a"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res2a_branch2a"
top: "res2a_branch2a"
name: "bn2a_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res2a_branch2a"
top: "res2a_branch2a"
name: "scale2a_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res2a_branch2a"
bottom: "res2a_branch2a"
name: "res2a_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res2a_branch2a"
top: "res2a_branch2b"
name: "res2a_branch2b"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res2a_branch2b"
top: "res2a_branch2b"
name: "bn2a_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res2a_branch2b"
top: "res2a_branch2b"
name: "scale2a_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res2a_branch2b"
bottom: "res2a_branch2b"
name: "res2a_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res2a_branch2b"
top: "res2a_branch2c"
name: "res2a_branch2c"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res2a_branch2c"
top: "res2a_branch2c"
name: "bn2a_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res2a_branch2c"
top: "res2a_branch2c"
name: "scale2a_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res2a_branch1"
bottom: "res2a_branch2c"
top: "res2a"
name: "res2a"
type: "Eltwise"
}
layer {
bottom: "res2a"
top: "res2a"
name: "res2a_relu"
type: "ReLU"
}
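# res2a is the first bottleneck residual unit: branch1 is a 1x1 projection shortcut,
# branch2a/2b/2c form the 1x1 -> 3x3 -> 1x1 bottleneck, and the Eltwise layer (default
# operation: SUM) adds the two branches before the final ReLU. The same pattern repeats
# for every res* unit below; identity units simply reuse the previous top as the shortcut.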
layer {
bottom: "res2a"
top: "res2b_branch2a"
name: "res2b_branch2a"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res2b_branch2a"
top: "res2b_branch2a"
name: "bn2b_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res2b_branch2a"
top: "res2b_branch2a"
name: "scale2b_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res2b_branch2a"
bottom: "res2b_branch2a"
name: "res2b_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res2b_branch2a"
top: "res2b_branch2b"
name: "res2b_branch2b"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res2b_branch2b"
top: "res2b_branch2b"
name: "bn2b_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res2b_branch2b"
top: "res2b_branch2b"
name: "scale2b_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res2b_branch2b"
bottom: "res2b_branch2b"
name: "res2b_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res2b_branch2b"
top: "res2b_branch2c"
name: "res2b_branch2c"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res2b_branch2c"
top: "res2b_branch2c"
name: "bn2b_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res2b_branch2c"
top: "res2b_branch2c"
name: "scale2b_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res2a"
bottom: "res2b_branch2c"
top: "res2b"
name: "res2b"
type: "Eltwise"
}
layer {
bottom: "res2b"
top: "res2b"
name: "res2b_relu"
type: "ReLU"
}
layer {
bottom: "res2b"
top: "res2c_branch2a"
name: "res2c_branch2a"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res2c_branch2a"
top: "res2c_branch2a"
name: "bn2c_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res2c_branch2a"
top: "res2c_branch2a"
name: "scale2c_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res2c_branch2a"
bottom: "res2c_branch2a"
name: "res2c_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res2c_branch2a"
top: "res2c_branch2b"
name: "res2c_branch2b"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res2c_branch2b"
top: "res2c_branch2b"
name: "bn2c_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res2c_branch2b"
top: "res2c_branch2b"
name: "scale2c_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res2c_branch2b"
bottom: "res2c_branch2b"
name: "res2c_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res2c_branch2b"
top: "res2c_branch2c"
name: "res2c_branch2c"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res2c_branch2c"
top: "res2c_branch2c"
name: "bn2c_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res2c_branch2c"
top: "res2c_branch2c"
name: "scale2c_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res2b"
bottom: "res2c_branch2c"
top: "res2c"
name: "res2c"
type: "Eltwise"
}
layer {
bottom: "res2c"
top: "res2c"
name: "res2c_relu"
type: "ReLU"
}
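# conv3_x stage (res3a, res3b1-res3b3): the first unit downsamples with stride-2 1x1
# convolutions on both the shortcut (branch1) and branch2a, and the channel widths grow
# to 128/512.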
layer {
bottom: "res2c"
top: "res3a_branch1"
name: "res3a_branch1"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 1
pad: 0
stride: 2
bias_term: false
}
}
layer {
bottom: "res3a_branch1"
top: "res3a_branch1"
name: "bn3a_branch1"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3a_branch1"
top: "res3a_branch1"
name: "scale3a_branch1"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res2c"
top: "res3a_branch2a"
name: "res3a_branch2a"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 1
pad: 0
stride: 2
bias_term: false
}
}
layer {
bottom: "res3a_branch2a"
top: "res3a_branch2a"
name: "bn3a_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3a_branch2a"
top: "res3a_branch2a"
name: "scale3a_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res3a_branch2a"
bottom: "res3a_branch2a"
name: "res3a_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res3a_branch2a"
top: "res3a_branch2b"
name: "res3a_branch2b"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res3a_branch2b"
top: "res3a_branch2b"
name: "bn3a_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3a_branch2b"
top: "res3a_branch2b"
name: "scale3a_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res3a_branch2b"
bottom: "res3a_branch2b"
name: "res3a_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res3a_branch2b"
top: "res3a_branch2c"
name: "res3a_branch2c"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res3a_branch2c"
top: "res3a_branch2c"
name: "bn3a_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3a_branch2c"
top: "res3a_branch2c"
name: "scale3a_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res3a_branch1"
bottom: "res3a_branch2c"
top: "res3a"
name: "res3a"
type: "Eltwise"
}
layer {
bottom: "res3a"
top: "res3a"
name: "res3a_relu"
type: "ReLU"
}
layer {
bottom: "res3a"
top: "res3b1_branch2a"
name: "res3b1_branch2a"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res3b1_branch2a"
top: "res3b1_branch2a"
name: "bn3b1_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3b1_branch2a"
top: "res3b1_branch2a"
name: "scale3b1_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res3b1_branch2a"
bottom: "res3b1_branch2a"
name: "res3b1_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res3b1_branch2a"
top: "res3b1_branch2b"
name: "res3b1_branch2b"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res3b1_branch2b"
top: "res3b1_branch2b"
name: "bn3b1_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3b1_branch2b"
top: "res3b1_branch2b"
name: "scale3b1_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res3b1_branch2b"
bottom: "res3b1_branch2b"
name: "res3b1_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res3b1_branch2b"
top: "res3b1_branch2c"
name: "res3b1_branch2c"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res3b1_branch2c"
top: "res3b1_branch2c"
name: "bn3b1_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3b1_branch2c"
top: "res3b1_branch2c"
name: "scale3b1_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res3a"
bottom: "res3b1_branch2c"
top: "res3b1"
name: "res3b1"
type: "Eltwise"
}
layer {
bottom: "res3b1"
top: "res3b1"
name: "res3b1_relu"
type: "ReLU"
}
layer {
bottom: "res3b1"
top: "res3b2_branch2a"
name: "res3b2_branch2a"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res3b2_branch2a"
top: "res3b2_branch2a"
name: "bn3b2_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3b2_branch2a"
top: "res3b2_branch2a"
name: "scale3b2_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res3b2_branch2a"
bottom: "res3b2_branch2a"
name: "res3b2_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res3b2_branch2a"
top: "res3b2_branch2b"
name: "res3b2_branch2b"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res3b2_branch2b"
top: "res3b2_branch2b"
name: "bn3b2_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3b2_branch2b"
top: "res3b2_branch2b"
name: "scale3b2_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res3b2_branch2b"
bottom: "res3b2_branch2b"
name: "res3b2_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res3b2_branch2b"
top: "res3b2_branch2c"
name: "res3b2_branch2c"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res3b2_branch2c"
top: "res3b2_branch2c"
name: "bn3b2_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3b2_branch2c"
top: "res3b2_branch2c"
name: "scale3b2_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res3b1"
bottom: "res3b2_branch2c"
top: "res3b2"
name: "res3b2"
type: "Eltwise"
}
layer {
bottom: "res3b2"
top: "res3b2"
name: "res3b2_relu"
type: "ReLU"
}
layer {
bottom: "res3b2"
top: "res3b3_branch2a"
name: "res3b3_branch2a"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res3b3_branch2a"
top: "res3b3_branch2a"
name: "bn3b3_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3b3_branch2a"
top: "res3b3_branch2a"
name: "scale3b3_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res3b3_branch2a"
bottom: "res3b3_branch2a"
name: "res3b3_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res3b3_branch2a"
top: "res3b3_branch2b"
name: "res3b3_branch2b"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res3b3_branch2b"
top: "res3b3_branch2b"
name: "bn3b3_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3b3_branch2b"
top: "res3b3_branch2b"
name: "scale3b3_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res3b3_branch2b"
bottom: "res3b3_branch2b"
name: "res3b3_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res3b3_branch2b"
top: "res3b3_branch2c"
name: "res3b3_branch2c"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res3b3_branch2c"
top: "res3b3_branch2c"
name: "bn3b3_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res3b3_branch2c"
top: "res3b3_branch2c"
name: "scale3b3_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res3b2"
bottom: "res3b3_branch2c"
top: "res3b3"
name: "res3b3"
type: "Eltwise"
}
layer {
bottom: "res3b3"
top: "res3b3"
name: "res3b3_relu"
type: "ReLU"
}
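# conv4_x stage (res4a, res4b1-res4b22): 23 bottleneck units at 256/1024 channels; as in
# conv3_x, only the first unit (res4a) uses stride 2.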
layer {
bottom: "res3b3"
top: "res4a_branch1"
name: "res4a_branch1"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 2
bias_term: false
}
}
layer {
bottom: "res4a_branch1"
top: "res4a_branch1"
name: "bn4a_branch1"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4a_branch1"
top: "res4a_branch1"
name: "scale4a_branch1"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res3b3"
top: "res4a_branch2a"
name: "res4a_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 2
bias_term: false
}
}
layer {
bottom: "res4a_branch2a"
top: "res4a_branch2a"
name: "bn4a_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4a_branch2a"
top: "res4a_branch2a"
name: "scale4a_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4a_branch2a"
bottom: "res4a_branch2a"
name: "res4a_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4a_branch2a"
top: "res4a_branch2b"
name: "res4a_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4a_branch2b"
top: "res4a_branch2b"
name: "bn4a_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4a_branch2b"
top: "res4a_branch2b"
name: "scale4a_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4a_branch2b"
bottom: "res4a_branch2b"
name: "res4a_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4a_branch2b"
top: "res4a_branch2c"
name: "res4a_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4a_branch2c"
top: "res4a_branch2c"
name: "bn4a_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4a_branch2c"
top: "res4a_branch2c"
name: "scale4a_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4a_branch1"
bottom: "res4a_branch2c"
top: "res4a"
name: "res4a"
type: "Eltwise"
}
layer {
bottom: "res4a"
top: "res4a"
name: "res4a_relu"
type: "ReLU"
}
layer {
bottom: "res4a"
top: "res4b1_branch2a"
name: "res4b1_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b1_branch2a"
top: "res4b1_branch2a"
name: "bn4b1_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b1_branch2a"
top: "res4b1_branch2a"
name: "scale4b1_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b1_branch2a"
bottom: "res4b1_branch2a"
name: "res4b1_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b1_branch2a"
top: "res4b1_branch2b"
name: "res4b1_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b1_branch2b"
top: "res4b1_branch2b"
name: "bn4b1_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b1_branch2b"
top: "res4b1_branch2b"
name: "scale4b1_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b1_branch2b"
bottom: "res4b1_branch2b"
name: "res4b1_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b1_branch2b"
top: "res4b1_branch2c"
name: "res4b1_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b1_branch2c"
top: "res4b1_branch2c"
name: "bn4b1_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b1_branch2c"
top: "res4b1_branch2c"
name: "scale4b1_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4a"
bottom: "res4b1_branch2c"
top: "res4b1"
name: "res4b1"
type: "Eltwise"
}
layer {
bottom: "res4b1"
top: "res4b1"
name: "res4b1_relu"
type: "ReLU"
}
layer {
bottom: "res4b1"
top: "res4b2_branch2a"
name: "res4b2_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b2_branch2a"
top: "res4b2_branch2a"
name: "bn4b2_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b2_branch2a"
top: "res4b2_branch2a"
name: "scale4b2_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b2_branch2a"
bottom: "res4b2_branch2a"
name: "res4b2_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b2_branch2a"
top: "res4b2_branch2b"
name: "res4b2_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b2_branch2b"
top: "res4b2_branch2b"
name: "bn4b2_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b2_branch2b"
top: "res4b2_branch2b"
name: "scale4b2_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b2_branch2b"
bottom: "res4b2_branch2b"
name: "res4b2_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b2_branch2b"
top: "res4b2_branch2c"
name: "res4b2_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b2_branch2c"
top: "res4b2_branch2c"
name: "bn4b2_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b2_branch2c"
top: "res4b2_branch2c"
name: "scale4b2_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b1"
bottom: "res4b2_branch2c"
top: "res4b2"
name: "res4b2"
type: "Eltwise"
}
layer {
bottom: "res4b2"
top: "res4b2"
name: "res4b2_relu"
type: "ReLU"
}
layer {
bottom: "res4b2"
top: "res4b3_branch2a"
name: "res4b3_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b3_branch2a"
top: "res4b3_branch2a"
name: "bn4b3_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b3_branch2a"
top: "res4b3_branch2a"
name: "scale4b3_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b3_branch2a"
bottom: "res4b3_branch2a"
name: "res4b3_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b3_branch2a"
top: "res4b3_branch2b"
name: "res4b3_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b3_branch2b"
top: "res4b3_branch2b"
name: "bn4b3_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b3_branch2b"
top: "res4b3_branch2b"
name: "scale4b3_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b3_branch2b"
bottom: "res4b3_branch2b"
name: "res4b3_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b3_branch2b"
top: "res4b3_branch2c"
name: "res4b3_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b3_branch2c"
top: "res4b3_branch2c"
name: "bn4b3_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b3_branch2c"
top: "res4b3_branch2c"
name: "scale4b3_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b2"
bottom: "res4b3_branch2c"
top: "res4b3"
name: "res4b3"
type: "Eltwise"
}
layer {
bottom: "res4b3"
top: "res4b3"
name: "res4b3_relu"
type: "ReLU"
}
layer {
bottom: "res4b3"
top: "res4b4_branch2a"
name: "res4b4_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b4_branch2a"
top: "res4b4_branch2a"
name: "bn4b4_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b4_branch2a"
top: "res4b4_branch2a"
name: "scale4b4_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b4_branch2a"
bottom: "res4b4_branch2a"
name: "res4b4_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b4_branch2a"
top: "res4b4_branch2b"
name: "res4b4_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b4_branch2b"
top: "res4b4_branch2b"
name: "bn4b4_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b4_branch2b"
top: "res4b4_branch2b"
name: "scale4b4_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b4_branch2b"
bottom: "res4b4_branch2b"
name: "res4b4_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b4_branch2b"
top: "res4b4_branch2c"
name: "res4b4_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b4_branch2c"
top: "res4b4_branch2c"
name: "bn4b4_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b4_branch2c"
top: "res4b4_branch2c"
name: "scale4b4_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b3"
bottom: "res4b4_branch2c"
top: "res4b4"
name: "res4b4"
type: "Eltwise"
}
layer {
bottom: "res4b4"
top: "res4b4"
name: "res4b4_relu"
type: "ReLU"
}
layer {
bottom: "res4b4"
top: "res4b5_branch2a"
name: "res4b5_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b5_branch2a"
top: "res4b5_branch2a"
name: "bn4b5_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b5_branch2a"
top: "res4b5_branch2a"
name: "scale4b5_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b5_branch2a"
bottom: "res4b5_branch2a"
name: "res4b5_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b5_branch2a"
top: "res4b5_branch2b"
name: "res4b5_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b5_branch2b"
top: "res4b5_branch2b"
name: "bn4b5_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b5_branch2b"
top: "res4b5_branch2b"
name: "scale4b5_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b5_branch2b"
bottom: "res4b5_branch2b"
name: "res4b5_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b5_branch2b"
top: "res4b5_branch2c"
name: "res4b5_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b5_branch2c"
top: "res4b5_branch2c"
name: "bn4b5_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b5_branch2c"
top: "res4b5_branch2c"
name: "scale4b5_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b4"
bottom: "res4b5_branch2c"
top: "res4b5"
name: "res4b5"
type: "Eltwise"
}
layer {
bottom: "res4b5"
top: "res4b5"
name: "res4b5_relu"
type: "ReLU"
}
layer {
bottom: "res4b5"
top: "res4b6_branch2a"
name: "res4b6_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b6_branch2a"
top: "res4b6_branch2a"
name: "bn4b6_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b6_branch2a"
top: "res4b6_branch2a"
name: "scale4b6_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b6_branch2a"
bottom: "res4b6_branch2a"
name: "res4b6_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b6_branch2a"
top: "res4b6_branch2b"
name: "res4b6_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b6_branch2b"
top: "res4b6_branch2b"
name: "bn4b6_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b6_branch2b"
top: "res4b6_branch2b"
name: "scale4b6_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b6_branch2b"
bottom: "res4b6_branch2b"
name: "res4b6_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b6_branch2b"
top: "res4b6_branch2c"
name: "res4b6_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b6_branch2c"
top: "res4b6_branch2c"
name: "bn4b6_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b6_branch2c"
top: "res4b6_branch2c"
name: "scale4b6_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b5"
bottom: "res4b6_branch2c"
top: "res4b6"
name: "res4b6"
type: "Eltwise"
}
layer {
bottom: "res4b6"
top: "res4b6"
name: "res4b6_relu"
type: "ReLU"
}
layer {
bottom: "res4b6"
top: "res4b7_branch2a"
name: "res4b7_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b7_branch2a"
top: "res4b7_branch2a"
name: "bn4b7_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b7_branch2a"
top: "res4b7_branch2a"
name: "scale4b7_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b7_branch2a"
bottom: "res4b7_branch2a"
name: "res4b7_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b7_branch2a"
top: "res4b7_branch2b"
name: "res4b7_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b7_branch2b"
top: "res4b7_branch2b"
name: "bn4b7_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b7_branch2b"
top: "res4b7_branch2b"
name: "scale4b7_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b7_branch2b"
bottom: "res4b7_branch2b"
name: "res4b7_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b7_branch2b"
top: "res4b7_branch2c"
name: "res4b7_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b7_branch2c"
top: "res4b7_branch2c"
name: "bn4b7_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b7_branch2c"
top: "res4b7_branch2c"
name: "scale4b7_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b6"
bottom: "res4b7_branch2c"
top: "res4b7"
name: "res4b7"
type: "Eltwise"
}
layer {
bottom: "res4b7"
top: "res4b7"
name: "res4b7_relu"
type: "ReLU"
}
layer {
bottom: "res4b7"
top: "res4b8_branch2a"
name: "res4b8_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b8_branch2a"
top: "res4b8_branch2a"
name: "bn4b8_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b8_branch2a"
top: "res4b8_branch2a"
name: "scale4b8_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b8_branch2a"
bottom: "res4b8_branch2a"
name: "res4b8_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b8_branch2a"
top: "res4b8_branch2b"
name: "res4b8_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b8_branch2b"
top: "res4b8_branch2b"
name: "bn4b8_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b8_branch2b"
top: "res4b8_branch2b"
name: "scale4b8_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b8_branch2b"
bottom: "res4b8_branch2b"
name: "res4b8_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b8_branch2b"
top: "res4b8_branch2c"
name: "res4b8_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b8_branch2c"
top: "res4b8_branch2c"
name: "bn4b8_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b8_branch2c"
top: "res4b8_branch2c"
name: "scale4b8_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b7"
bottom: "res4b8_branch2c"
top: "res4b8"
name: "res4b8"
type: "Eltwise"
}
layer {
bottom: "res4b8"
top: "res4b8"
name: "res4b8_relu"
type: "ReLU"
}
layer {
bottom: "res4b8"
top: "res4b9_branch2a"
name: "res4b9_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b9_branch2a"
top: "res4b9_branch2a"
name: "bn4b9_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b9_branch2a"
top: "res4b9_branch2a"
name: "scale4b9_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b9_branch2a"
bottom: "res4b9_branch2a"
name: "res4b9_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b9_branch2a"
top: "res4b9_branch2b"
name: "res4b9_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b9_branch2b"
top: "res4b9_branch2b"
name: "bn4b9_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b9_branch2b"
top: "res4b9_branch2b"
name: "scale4b9_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b9_branch2b"
bottom: "res4b9_branch2b"
name: "res4b9_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b9_branch2b"
top: "res4b9_branch2c"
name: "res4b9_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b9_branch2c"
top: "res4b9_branch2c"
name: "bn4b9_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b9_branch2c"
top: "res4b9_branch2c"
name: "scale4b9_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b8"
bottom: "res4b9_branch2c"
top: "res4b9"
name: "res4b9"
type: "Eltwise"
}
layer {
bottom: "res4b9"
top: "res4b9"
name: "res4b9_relu"
type: "ReLU"
}
layer {
bottom: "res4b9"
top: "res4b10_branch2a"
name: "res4b10_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b10_branch2a"
top: "res4b10_branch2a"
name: "bn4b10_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b10_branch2a"
top: "res4b10_branch2a"
name: "scale4b10_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b10_branch2a"
bottom: "res4b10_branch2a"
name: "res4b10_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b10_branch2a"
top: "res4b10_branch2b"
name: "res4b10_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b10_branch2b"
top: "res4b10_branch2b"
name: "bn4b10_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b10_branch2b"
top: "res4b10_branch2b"
name: "scale4b10_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b10_branch2b"
bottom: "res4b10_branch2b"
name: "res4b10_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b10_branch2b"
top: "res4b10_branch2c"
name: "res4b10_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b10_branch2c"
top: "res4b10_branch2c"
name: "bn4b10_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b10_branch2c"
top: "res4b10_branch2c"
name: "scale4b10_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b9"
bottom: "res4b10_branch2c"
top: "res4b10"
name: "res4b10"
type: "Eltwise"
}
layer {
bottom: "res4b10"
top: "res4b10"
name: "res4b10_relu"
type: "ReLU"
}
layer {
bottom: "res4b10"
top: "res4b11_branch2a"
name: "res4b11_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b11_branch2a"
top: "res4b11_branch2a"
name: "bn4b11_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b11_branch2a"
top: "res4b11_branch2a"
name: "scale4b11_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b11_branch2a"
bottom: "res4b11_branch2a"
name: "res4b11_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b11_branch2a"
top: "res4b11_branch2b"
name: "res4b11_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b11_branch2b"
top: "res4b11_branch2b"
name: "bn4b11_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b11_branch2b"
top: "res4b11_branch2b"
name: "scale4b11_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b11_branch2b"
bottom: "res4b11_branch2b"
name: "res4b11_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b11_branch2b"
top: "res4b11_branch2c"
name: "res4b11_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b11_branch2c"
top: "res4b11_branch2c"
name: "bn4b11_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b11_branch2c"
top: "res4b11_branch2c"
name: "scale4b11_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b10"
bottom: "res4b11_branch2c"
top: "res4b11"
name: "res4b11"
type: "Eltwise"
}
layer {
bottom: "res4b11"
top: "res4b11"
name: "res4b11_relu"
type: "ReLU"
}
layer {
bottom: "res4b11"
top: "res4b12_branch2a"
name: "res4b12_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b12_branch2a"
top: "res4b12_branch2a"
name: "bn4b12_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b12_branch2a"
top: "res4b12_branch2a"
name: "scale4b12_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b12_branch2a"
bottom: "res4b12_branch2a"
name: "res4b12_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b12_branch2a"
top: "res4b12_branch2b"
name: "res4b12_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b12_branch2b"
top: "res4b12_branch2b"
name: "bn4b12_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b12_branch2b"
top: "res4b12_branch2b"
name: "scale4b12_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b12_branch2b"
bottom: "res4b12_branch2b"
name: "res4b12_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b12_branch2b"
top: "res4b12_branch2c"
name: "res4b12_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b12_branch2c"
top: "res4b12_branch2c"
name: "bn4b12_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b12_branch2c"
top: "res4b12_branch2c"
name: "scale4b12_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b11"
bottom: "res4b12_branch2c"
top: "res4b12"
name: "res4b12"
type: "Eltwise"
}
layer {
bottom: "res4b12"
top: "res4b12"
name: "res4b12_relu"
type: "ReLU"
}
layer {
bottom: "res4b12"
top: "res4b13_branch2a"
name: "res4b13_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b13_branch2a"
top: "res4b13_branch2a"
name: "bn4b13_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b13_branch2a"
top: "res4b13_branch2a"
name: "scale4b13_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b13_branch2a"
bottom: "res4b13_branch2a"
name: "res4b13_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b13_branch2a"
top: "res4b13_branch2b"
name: "res4b13_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b13_branch2b"
top: "res4b13_branch2b"
name: "bn4b13_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b13_branch2b"
top: "res4b13_branch2b"
name: "scale4b13_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b13_branch2b"
bottom: "res4b13_branch2b"
name: "res4b13_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b13_branch2b"
top: "res4b13_branch2c"
name: "res4b13_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b13_branch2c"
top: "res4b13_branch2c"
name: "bn4b13_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b13_branch2c"
top: "res4b13_branch2c"
name: "scale4b13_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b12"
bottom: "res4b13_branch2c"
top: "res4b13"
name: "res4b13"
type: "Eltwise"
}
layer {
bottom: "res4b13"
top: "res4b13"
name: "res4b13_relu"
type: "ReLU"
}
layer {
bottom: "res4b13"
top: "res4b14_branch2a"
name: "res4b14_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b14_branch2a"
top: "res4b14_branch2a"
name: "bn4b14_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b14_branch2a"
top: "res4b14_branch2a"
name: "scale4b14_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b14_branch2a"
bottom: "res4b14_branch2a"
name: "res4b14_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b14_branch2a"
top: "res4b14_branch2b"
name: "res4b14_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b14_branch2b"
top: "res4b14_branch2b"
name: "bn4b14_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b14_branch2b"
top: "res4b14_branch2b"
name: "scale4b14_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b14_branch2b"
bottom: "res4b14_branch2b"
name: "res4b14_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b14_branch2b"
top: "res4b14_branch2c"
name: "res4b14_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b14_branch2c"
top: "res4b14_branch2c"
name: "bn4b14_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b14_branch2c"
top: "res4b14_branch2c"
name: "scale4b14_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b13"
bottom: "res4b14_branch2c"
top: "res4b14"
name: "res4b14"
type: "Eltwise"
}
layer {
bottom: "res4b14"
top: "res4b14"
name: "res4b14_relu"
type: "ReLU"
}
layer {
bottom: "res4b14"
top: "res4b15_branch2a"
name: "res4b15_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b15_branch2a"
top: "res4b15_branch2a"
name: "bn4b15_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b15_branch2a"
top: "res4b15_branch2a"
name: "scale4b15_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b15_branch2a"
bottom: "res4b15_branch2a"
name: "res4b15_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b15_branch2a"
top: "res4b15_branch2b"
name: "res4b15_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b15_branch2b"
top: "res4b15_branch2b"
name: "bn4b15_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b15_branch2b"
top: "res4b15_branch2b"
name: "scale4b15_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b15_branch2b"
bottom: "res4b15_branch2b"
name: "res4b15_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b15_branch2b"
top: "res4b15_branch2c"
name: "res4b15_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b15_branch2c"
top: "res4b15_branch2c"
name: "bn4b15_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b15_branch2c"
top: "res4b15_branch2c"
name: "scale4b15_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b14"
bottom: "res4b15_branch2c"
top: "res4b15"
name: "res4b15"
type: "Eltwise"
}
layer {
bottom: "res4b15"
top: "res4b15"
name: "res4b15_relu"
type: "ReLU"
}
layer {
bottom: "res4b15"
top: "res4b16_branch2a"
name: "res4b16_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b16_branch2a"
top: "res4b16_branch2a"
name: "bn4b16_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b16_branch2a"
top: "res4b16_branch2a"
name: "scale4b16_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b16_branch2a"
bottom: "res4b16_branch2a"
name: "res4b16_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b16_branch2a"
top: "res4b16_branch2b"
name: "res4b16_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b16_branch2b"
top: "res4b16_branch2b"
name: "bn4b16_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b16_branch2b"
top: "res4b16_branch2b"
name: "scale4b16_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b16_branch2b"
bottom: "res4b16_branch2b"
name: "res4b16_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b16_branch2b"
top: "res4b16_branch2c"
name: "res4b16_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b16_branch2c"
top: "res4b16_branch2c"
name: "bn4b16_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b16_branch2c"
top: "res4b16_branch2c"
name: "scale4b16_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b15"
bottom: "res4b16_branch2c"
top: "res4b16"
name: "res4b16"
type: "Eltwise"
}
layer {
bottom: "res4b16"
top: "res4b16"
name: "res4b16_relu"
type: "ReLU"
}
layer {
bottom: "res4b16"
top: "res4b17_branch2a"
name: "res4b17_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b17_branch2a"
top: "res4b17_branch2a"
name: "bn4b17_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b17_branch2a"
top: "res4b17_branch2a"
name: "scale4b17_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b17_branch2a"
bottom: "res4b17_branch2a"
name: "res4b17_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b17_branch2a"
top: "res4b17_branch2b"
name: "res4b17_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b17_branch2b"
top: "res4b17_branch2b"
name: "bn4b17_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b17_branch2b"
top: "res4b17_branch2b"
name: "scale4b17_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b17_branch2b"
bottom: "res4b17_branch2b"
name: "res4b17_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b17_branch2b"
top: "res4b17_branch2c"
name: "res4b17_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b17_branch2c"
top: "res4b17_branch2c"
name: "bn4b17_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b17_branch2c"
top: "res4b17_branch2c"
name: "scale4b17_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b16"
bottom: "res4b17_branch2c"
top: "res4b17"
name: "res4b17"
type: "Eltwise"
}
layer {
bottom: "res4b17"
top: "res4b17"
name: "res4b17_relu"
type: "ReLU"
}
layer {
bottom: "res4b17"
top: "res4b18_branch2a"
name: "res4b18_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b18_branch2a"
top: "res4b18_branch2a"
name: "bn4b18_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b18_branch2a"
top: "res4b18_branch2a"
name: "scale4b18_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b18_branch2a"
bottom: "res4b18_branch2a"
name: "res4b18_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b18_branch2a"
top: "res4b18_branch2b"
name: "res4b18_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b18_branch2b"
top: "res4b18_branch2b"
name: "bn4b18_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b18_branch2b"
top: "res4b18_branch2b"
name: "scale4b18_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b18_branch2b"
bottom: "res4b18_branch2b"
name: "res4b18_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b18_branch2b"
top: "res4b18_branch2c"
name: "res4b18_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b18_branch2c"
top: "res4b18_branch2c"
name: "bn4b18_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b18_branch2c"
top: "res4b18_branch2c"
name: "scale4b18_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b17"
bottom: "res4b18_branch2c"
top: "res4b18"
name: "res4b18"
type: "Eltwise"
}
layer {
bottom: "res4b18"
top: "res4b18"
name: "res4b18_relu"
type: "ReLU"
}
layer {
bottom: "res4b18"
top: "res4b19_branch2a"
name: "res4b19_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b19_branch2a"
top: "res4b19_branch2a"
name: "bn4b19_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b19_branch2a"
top: "res4b19_branch2a"
name: "scale4b19_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b19_branch2a"
bottom: "res4b19_branch2a"
name: "res4b19_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b19_branch2a"
top: "res4b19_branch2b"
name: "res4b19_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b19_branch2b"
top: "res4b19_branch2b"
name: "bn4b19_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b19_branch2b"
top: "res4b19_branch2b"
name: "scale4b19_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b19_branch2b"
bottom: "res4b19_branch2b"
name: "res4b19_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b19_branch2b"
top: "res4b19_branch2c"
name: "res4b19_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b19_branch2c"
top: "res4b19_branch2c"
name: "bn4b19_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b19_branch2c"
top: "res4b19_branch2c"
name: "scale4b19_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b18"
bottom: "res4b19_branch2c"
top: "res4b19"
name: "res4b19"
type: "Eltwise"
}
layer {
bottom: "res4b19"
top: "res4b19"
name: "res4b19_relu"
type: "ReLU"
}
layer {
bottom: "res4b19"
top: "res4b20_branch2a"
name: "res4b20_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b20_branch2a"
top: "res4b20_branch2a"
name: "bn4b20_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b20_branch2a"
top: "res4b20_branch2a"
name: "scale4b20_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b20_branch2a"
bottom: "res4b20_branch2a"
name: "res4b20_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b20_branch2a"
top: "res4b20_branch2b"
name: "res4b20_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b20_branch2b"
top: "res4b20_branch2b"
name: "bn4b20_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b20_branch2b"
top: "res4b20_branch2b"
name: "scale4b20_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b20_branch2b"
bottom: "res4b20_branch2b"
name: "res4b20_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b20_branch2b"
top: "res4b20_branch2c"
name: "res4b20_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b20_branch2c"
top: "res4b20_branch2c"
name: "bn4b20_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b20_branch2c"
top: "res4b20_branch2c"
name: "scale4b20_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b19"
bottom: "res4b20_branch2c"
top: "res4b20"
name: "res4b20"
type: "Eltwise"
}
layer {
bottom: "res4b20"
top: "res4b20"
name: "res4b20_relu"
type: "ReLU"
}
layer {
bottom: "res4b20"
top: "res4b21_branch2a"
name: "res4b21_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b21_branch2a"
top: "res4b21_branch2a"
name: "bn4b21_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b21_branch2a"
top: "res4b21_branch2a"
name: "scale4b21_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b21_branch2a"
bottom: "res4b21_branch2a"
name: "res4b21_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b21_branch2a"
top: "res4b21_branch2b"
name: "res4b21_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b21_branch2b"
top: "res4b21_branch2b"
name: "bn4b21_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b21_branch2b"
top: "res4b21_branch2b"
name: "scale4b21_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b21_branch2b"
bottom: "res4b21_branch2b"
name: "res4b21_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b21_branch2b"
top: "res4b21_branch2c"
name: "res4b21_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b21_branch2c"
top: "res4b21_branch2c"
name: "bn4b21_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b21_branch2c"
top: "res4b21_branch2c"
name: "scale4b21_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b20"
bottom: "res4b21_branch2c"
top: "res4b21"
name: "res4b21"
type: "Eltwise"
}
layer {
bottom: "res4b21"
top: "res4b21"
name: "res4b21_relu"
type: "ReLU"
}
layer {
bottom: "res4b21"
top: "res4b22_branch2a"
name: "res4b22_branch2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b22_branch2a"
top: "res4b22_branch2a"
name: "bn4b22_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b22_branch2a"
top: "res4b22_branch2a"
name: "scale4b22_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b22_branch2a"
bottom: "res4b22_branch2a"
name: "res4b22_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res4b22_branch2a"
top: "res4b22_branch2b"
name: "res4b22_branch2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b22_branch2b"
top: "res4b22_branch2b"
name: "bn4b22_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b22_branch2b"
top: "res4b22_branch2b"
name: "scale4b22_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res4b22_branch2b"
bottom: "res4b22_branch2b"
name: "res4b22_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res4b22_branch2b"
top: "res4b22_branch2c"
name: "res4b22_branch2c"
type: "Convolution"
convolution_param {
num_output: 1024
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res4b22_branch2c"
top: "res4b22_branch2c"
name: "bn4b22_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res4b22_branch2c"
top: "res4b22_branch2c"
name: "scale4b22_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res4b21"
bottom: "res4b22_branch2c"
top: "res4b22"
name: "res4b22"
type: "Eltwise"
}
layer {
bottom: "res4b22"
top: "res4b22"
name: "res4b22_relu"
type: "ReLU"
}
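# The shared backbone ends here at res4b22 (conv1 through conv4_x), giving a feature map
# with an effective stride of 16 relative to the input; both the RPN below and the RoI
# head further down consume this feature map.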
#### Add RPN network ####
layer {
name: "rpn_conv/3x3"
type: "Convolution"
bottom: "res4b22"
top: "rpn/output"
param { lr_mult: 1.0 }
param { lr_mult: 2.0 }
convolution_param {
num_output: 512
kernel_size: 3 pad: 1 stride: 1
weight_filler { type: "gaussian" std: 0.01 }
bias_filler { type: "constant" value: 0 }
}
}
layer {
name: "rpn_relu/3x3"
type: "ReLU"
bottom: "rpn/output"
top: "rpn/output"
}
layer {
name: "rpn_cls_score"
type: "Convolution"
bottom: "rpn/output"
top: "rpn_cls_score"
param { lr_mult: 1.0 }
param { lr_mult: 2.0 }
convolution_param {
num_output: 18 # 2(bg/fg) * 9(anchors)
kernel_size: 1 pad: 0 stride: 1
weight_filler { type: "gaussian" std: 0.01 }
bias_filler { type: "constant" value: 0 }
}
}
layer {
name: "rpn_bbox_pred"
type: "Convolution"
bottom: "rpn/output"
top: "rpn_bbox_pred"
param { lr_mult: 1.0 }
param { lr_mult: 2.0 }
convolution_param {
num_output: 36 # 4 * 9(anchors)
kernel_size: 1 pad: 0 stride: 1
weight_filler { type: "gaussian" std: 0.01 }
bias_filler { type: "constant" value: 0 }
}
}
layer {
bottom: "rpn_cls_score"
top: "rpn_cls_score_reshape"
name: "rpn_cls_score_reshape"
type: "Reshape"
reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } }
}
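# Reshape to (N, 2, 9*H, W): dim: 0 copies the corresponding bottom dimension and dim: -1
# is inferred, so the 18 score channels are split into a bg/fg pair per anchor and the
# Softmax below normalises over that pair.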
layer {
name: 'rpn-data'
type: 'Python'
bottom: 'rpn_cls_score'
bottom: 'gt_boxes'
bottom: 'im_info'
bottom: 'data'
top: 'rpn_labels'
top: 'rpn_bbox_targets'
top: 'rpn_bbox_inside_weights'
top: 'rpn_bbox_outside_weights'
python_param {
module: 'rpn.anchor_target_layer'
layer: 'AnchorTargetLayer'
param_str: "'feat_stride': 16"
}
}
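# AnchorTargetLayer assigns bg/fg labels and regression targets to the anchors.
# feat_stride 16 is the cumulative stride of the backbone up to res4b22:
# 2 (conv1) x 2 (pool1) x 2 (res3a) x 2 (res4a) = 16.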
layer {
name: "rpn_loss_cls"
type: "SoftmaxWithLoss"
bottom: "rpn_cls_score_reshape"
bottom: "rpn_labels"
propagate_down: 1
propagate_down: 0
top: "rpn_cls_loss"
loss_weight: 1
loss_param {
ignore_label: -1
normalize: true
}
}
layer {
name: "rpn_loss_bbox"
type: "SmoothL1Loss"
bottom: "rpn_bbox_pred"
bottom: "rpn_bbox_targets"
bottom: 'rpn_bbox_inside_weights'
bottom: 'rpn_bbox_outside_weights'
top: "rpn_loss_bbox"
loss_weight: 1
smooth_l1_loss_param { sigma: 3.0 }
}
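# sigma 3.0 narrows the quadratic region of the smooth L1 loss (quadratic for
# |x| < 1/sigma^2), as used for the RPN in py-faster-rcnn end-to-end training.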
#========= RoI Proposal ============
layer {
name: "rpn_cls_prob"
type: "Softmax"
bottom: "rpn_cls_score_reshape"
top: "rpn_cls_prob"
}
layer {
name: 'rpn_cls_prob_reshape'
type: 'Reshape'
bottom: 'rpn_cls_prob'
top: 'rpn_cls_prob_reshape'
reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } }
}
layer {
name: 'proposal'
type: 'Python'
bottom: 'rpn_cls_prob_reshape'
bottom: 'rpn_bbox_pred'
bottom: 'im_info'
top: 'rpn_rois'
# top: 'rpn_scores'
python_param {
module: 'rpn.proposal_layer'
layer: 'ProposalLayer'
param_str: "'feat_stride': 16"
}
}
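# ProposalLayer converts the per-anchor scores and bbox deltas into scored
# region proposals (clip to image, NMS, top-N selection) and emits them as
# rpn_rois for the detection branch.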
#layer {
# name: 'debug-data'
# type: 'Python'
# bottom: 'data'
# bottom: 'rpn_rois'
# bottom: 'rpn_scores'
# python_param {
# module: 'rpn.debug_layer'
# layer: 'RPNDebugLayer'
# }
#}
layer {
name: 'roi-data'
type: 'Python'
bottom: 'rpn_rois'
bottom: 'gt_boxes'
top: 'rois'
top: 'labels'
top: 'bbox_targets'
top: 'bbox_inside_weights'
top: 'bbox_outside_weights'
python_param {
module: 'rpn.proposal_target_layer'
layer: 'ProposalTargetLayer'
param_str: "'num_classes': 40"
}
}
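# ProposalTargetLayer samples a minibatch of foreground/background RoIs from
# rpn_rois, matches them to gt_boxes, and produces classification labels plus
# per-class bbox regression targets; num_classes 40 matches the input-data
# layer (presumably 39 object classes plus background).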
##### Stop ResNet after conv4_x and add RoI pooling
layer {
name: "roi_pool5"
type: "ROIPooling"
bottom: "res4b22"
bottom: "rois"
top: "roipool5"
roi_pooling_param {
pooled_w: 14
pooled_h: 14
spatial_scale: 0.0625 # 1/16
}
}
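# spatial_scale 0.0625 = 1/16 maps image-space RoI coordinates onto the
# stride-16 res4b22 feature map. RoIs are pooled to 14x14 (rather than the 7x7
# used with VGG16) so the stride-2 res5a block below reduces them to 7x7.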
##### Resume ResNet conv5_x layers after the RoI pooling layer
layer {
bottom: "roipool5"
top: "res5a_branch1"
name: "res5a_branch1"
type: "Convolution"
convolution_param {
num_output: 2048
kernel_size: 1
pad: 0
stride: 2
bias_term: false
}
}
layer {
bottom: "res5a_branch1"
top: "res5a_branch1"
name: "bn5a_branch1"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res5a_branch1"
top: "res5a_branch1"
name: "scale5a_branch1"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "roipool5"
top: "res5a_branch2a"
name: "res5a_branch2a"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 1
pad: 0
stride: 2
bias_term: false
}
}
layer {
bottom: "res5a_branch2a"
top: "res5a_branch2a"
name: "bn5a_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res5a_branch2a"
top: "res5a_branch2a"
name: "scale5a_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res5a_branch2a"
bottom: "res5a_branch2a"
name: "res5a_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res5a_branch2a"
top: "res5a_branch2b"
name: "res5a_branch2b"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res5a_branch2b"
top: "res5a_branch2b"
name: "bn5a_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res5a_branch2b"
top: "res5a_branch2b"
name: "scale5a_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res5a_branch2b"
bottom: "res5a_branch2b"
name: "res5a_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res5a_branch2b"
top: "res5a_branch2c"
name: "res5a_branch2c"
type: "Convolution"
convolution_param {
num_output: 2048
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res5a_branch2c"
top: "res5a_branch2c"
name: "bn5a_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res5a_branch2c"
top: "res5a_branch2c"
name: "scale5a_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res5a_branch1"
bottom: "res5a_branch2c"
top: "res5a"
name: "res5a"
type: "Eltwise"
}
layer {
bottom: "res5a"
top: "res5a"
name: "res5a_relu"
type: "ReLU"
}
layer {
name: "res5a_drop"
type: "Dropout"
bottom: "res5a"
top: "res5a"
dropout_param {
dropout_ratio: 0.5
}
}
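# Dropout after each res5 block is not part of the original ResNet-101
# definition; it appears to have been added here as extra regularization for
# the per-RoI head.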
layer {
bottom: "res5a"
top: "res5b_branch2a"
name: "res5b_branch2a"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res5b_branch2a"
top: "res5b_branch2a"
name: "bn5b_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res5b_branch2a"
top: "res5b_branch2a"
name: "scale5b_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res5b_branch2a"
bottom: "res5b_branch2a"
name: "res5b_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res5b_branch2a"
top: "res5b_branch2b"
name: "res5b_branch2b"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res5b_branch2b"
top: "res5b_branch2b"
name: "bn5b_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res5b_branch2b"
top: "res5b_branch2b"
name: "scale5b_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res5b_branch2b"
bottom: "res5b_branch2b"
name: "res5b_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res5b_branch2b"
top: "res5b_branch2c"
name: "res5b_branch2c"
type: "Convolution"
convolution_param {
num_output: 2048
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res5b_branch2c"
top: "res5b_branch2c"
name: "bn5b_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res5b_branch2c"
top: "res5b_branch2c"
name: "scale5b_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res5a"
bottom: "res5b_branch2c"
top: "res5b"
name: "res5b"
type: "Eltwise"
}
layer {
bottom: "res5b"
top: "res5b"
name: "res5b_relu"
type: "ReLU"
}
layer {
name: "res5b_drop"
type: "Dropout"
bottom: "res5b"
top: "res5b"
dropout_param {
dropout_ratio: 0.5
}
}
layer {
bottom: "res5b"
top: "res5c_branch2a"
name: "res5c_branch2a"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res5c_branch2a"
top: "res5c_branch2a"
name: "bn5c_branch2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res5c_branch2a"
top: "res5c_branch2a"
name: "scale5c_branch2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res5c_branch2a"
bottom: "res5c_branch2a"
name: "res5c_branch2a_relu"
type: "ReLU"
}
layer {
bottom: "res5c_branch2a"
top: "res5c_branch2b"
name: "res5c_branch2b"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "res5c_branch2b"
top: "res5c_branch2b"
name: "bn5c_branch2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res5c_branch2b"
top: "res5c_branch2b"
name: "scale5c_branch2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
top: "res5c_branch2b"
bottom: "res5c_branch2b"
name: "res5c_branch2b_relu"
type: "ReLU"
}
layer {
bottom: "res5c_branch2b"
top: "res5c_branch2c"
name: "res5c_branch2c"
type: "Convolution"
convolution_param {
num_output: 2048
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "res5c_branch2c"
top: "res5c_branch2c"
name: "bn5c_branch2c"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "res5c_branch2c"
top: "res5c_branch2c"
name: "scale5c_branch2c"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "res5b"
bottom: "res5c_branch2c"
top: "res5c"
name: "res5c"
type: "Eltwise"
}
layer {
bottom: "res5c"
top: "res5c"
name: "res5c_relu"
type: "ReLU"
}
layer {
name: "res5c_drop"
type: "Dropout"
bottom: "res5c"
top: "res5c"
dropout_param {
dropout_ratio: 0.5
}
}
layer {
bottom: "res5c"
top: "pool5"
name: "pool5"
type: "Pooling"
pooling_param {
kernel_size: 7
stride: 1
pool: AVE
}
}
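# Global 7x7 average pooling over the 7x7 res5c output yields a 2048-d feature
# vector per RoI for the cls_score and bbox_pred layers below.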
######### Add Faster R-CNN cls and bbox layers
layer {
name: "cls_score"
type: "InnerProduct"
bottom: "pool5"
top: "cls_score"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 40
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "bbox_pred"
type: "InnerProduct"
bottom: "pool5"
top: "bbox_pred"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 160
weight_filler {
type: "gaussian"
std: 0.001
}
bias_filler {
type: "constant"
value: 0
}
}
}
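# 160 outputs = 4 box deltas x 40 classes (class-specific bounding-box
# regression, matching num_classes in the roi-data layer).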
layer {
name: "loss_cls"
type: "SoftmaxWithLoss"
bottom: "cls_score"
bottom: "labels"
propagate_down: 1
propagate_down: 0
top: "loss_cls"
loss_weight: 1
}
layer {
name: "loss_bbox"
type: "SmoothL1Loss"
bottom: "bbox_pred"
bottom: "bbox_targets"
bottom: "bbox_inside_weights"
bottom: "bbox_outside_weights"
top: "loss_bbox"
loss_weight: 1
}