# mobilefacenetv2 -- Caffe deploy prototxt
# Gist by @KaleidoZhouYN, created January 31, 2019 10:58
# (GitHub gist page chrome converted to comments so this file parses as prototxt.)
# Network input: a single face crop, N=1, C=3, H=112, W=112.
# (Channel order / mean-subtraction are not specified here -- they depend on
# the preprocessing used at export time; confirm against the original pipeline.)
layer {
name: "data"
type: "Input"
top: "data"
input_param {
shape {
dim: 1
dim: 3
dim: 112
dim: 112
}
}
}
# Stem: 3x3 stride-2 convolution, 3 -> 64 channels, 112x112 -> 56x56.
layer {
name: "ConvNd_1"
type: "Convolution"
bottom: "data"
top: "ConvNd_1"
convolution_param {
num_output: 64
bias_term: false
pad: 1
kernel_size: 3
group: 1
stride: 2
dilation: 1
}
}
# PReLU is applied BEFORE BatchNorm here (order preserved from the source
# framework's export; unusual for Caffe nets but intentional in this graph).
layer {
name: "PReLU_1"
type: "PReLU"
bottom: "ConvNd_1"
top: "PReLU_1"
prelu_param {
channel_shared: false
}
}
# NOTE(review): this BatchNorm -- like almost all BatchNorm layers below --
# has no paired Scale layer, so in Caffe it only normalizes (no learned
# gamma/beta). Only BatchNorm_47/48 near the head carry a Scale layer.
# Confirm the converter folded the affine parameters elsewhere; a missing
# Scale layer is a classic pytorch-to-caffe conversion pitfall.
layer {
name: "BatchNorm_1"
type: "BatchNorm"
bottom: "PReLU_1"
top: "BatchNorm_1"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_2"
type: "Convolution"
bottom: "BatchNorm_1"
top: "ConvNd_2"
convolution_param {
num_output: 64
bias_term: true
pad: 1
kernel_size: 3
group: 64
stride: 1
dilation: 1
}
}
layer {
name: "BatchNorm_2"
type: "BatchNorm"
bottom: "ConvNd_2"
top: "BatchNorm_2"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_3"
type: "Convolution"
bottom: "BatchNorm_2"
top: "ConvNd_3"
convolution_param {
num_output: 256
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_2"
type: "PReLU"
bottom: "ConvNd_3"
top: "PReLU_2"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_3"
type: "BatchNorm"
bottom: "PReLU_2"
top: "BatchNorm_3"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
# Downsampling depthwise 3x3 stride-2 conv (group == num_output == 256):
# spatial size 56x56 -> 28x28. Start of the 64-channel bottleneck stage.
layer {
name: "ConvNd_4"
type: "Convolution"
bottom: "BatchNorm_3"
top: "ConvNd_4"
convolution_param {
num_output: 256
bias_term: true
pad: 1
kernel_size: 3
group: 256
stride: 2
dilation: 1
}
}
layer {
name: "PReLU_3"
type: "PReLU"
bottom: "ConvNd_4"
top: "PReLU_3"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_4"
type: "BatchNorm"
bottom: "PReLU_3"
top: "BatchNorm_4"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_5"
type: "Convolution"
bottom: "BatchNorm_4"
top: "ConvNd_5"
convolution_param {
num_output: 64
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "BatchNorm_5"
type: "BatchNorm"
bottom: "ConvNd_5"
top: "BatchNorm_5"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_6"
type: "Convolution"
bottom: "BatchNorm_5"
top: "ConvNd_6"
convolution_param {
num_output: 256
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_4"
type: "PReLU"
bottom: "ConvNd_6"
top: "PReLU_4"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_6"
type: "BatchNorm"
bottom: "PReLU_4"
top: "BatchNorm_6"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_7"
type: "Convolution"
bottom: "BatchNorm_6"
top: "ConvNd_7"
convolution_param {
num_output: 256
bias_term: true
pad: 1
kernel_size: 3
group: 256
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_5"
type: "PReLU"
bottom: "ConvNd_7"
top: "PReLU_5"
prelu_param {
channel_shared: false
}
}
layer {
name: "Add_1"
type: "Eltwise"
bottom: "PReLU_5"
bottom: "PReLU_4"
top: "Add_1"
}
layer {
name: "BatchNorm_7"
type: "BatchNorm"
bottom: "Add_1"
top: "BatchNorm_7"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_8"
type: "Convolution"
bottom: "BatchNorm_7"
top: "ConvNd_8"
convolution_param {
num_output: 64
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "Add_2"
type: "Eltwise"
bottom: "ConvNd_5"
bottom: "ConvNd_8"
top: "Add_2"
}
layer {
name: "BatchNorm_8"
type: "BatchNorm"
bottom: "Add_2"
top: "BatchNorm_8"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_9"
type: "Convolution"
bottom: "BatchNorm_8"
top: "ConvNd_9"
convolution_param {
num_output: 256
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_6"
type: "PReLU"
bottom: "ConvNd_9"
top: "PReLU_6"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_9"
type: "BatchNorm"
bottom: "PReLU_6"
top: "BatchNorm_9"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_10"
type: "Convolution"
bottom: "BatchNorm_9"
top: "ConvNd_10"
convolution_param {
num_output: 256
bias_term: true
pad: 1
kernel_size: 3
group: 256
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_7"
type: "PReLU"
bottom: "ConvNd_10"
top: "PReLU_7"
prelu_param {
channel_shared: false
}
}
layer {
name: "Add_3"
type: "Eltwise"
bottom: "PReLU_7"
bottom: "PReLU_6"
top: "Add_3"
}
layer {
name: "BatchNorm_10"
type: "BatchNorm"
bottom: "Add_3"
top: "BatchNorm_10"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_11"
type: "Convolution"
bottom: "BatchNorm_10"
top: "ConvNd_11"
convolution_param {
num_output: 64
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "Add_4"
type: "Eltwise"
bottom: "Add_2"
bottom: "ConvNd_11"
top: "Add_4"
}
layer {
name: "BatchNorm_11"
type: "BatchNorm"
bottom: "Add_4"
top: "BatchNorm_11"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_12"
type: "Convolution"
bottom: "BatchNorm_11"
top: "ConvNd_12"
convolution_param {
num_output: 256
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_8"
type: "PReLU"
bottom: "ConvNd_12"
top: "PReLU_8"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_12"
type: "BatchNorm"
bottom: "PReLU_8"
top: "BatchNorm_12"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_13"
type: "Convolution"
bottom: "BatchNorm_12"
top: "ConvNd_13"
convolution_param {
num_output: 256
bias_term: true
pad: 1
kernel_size: 3
group: 256
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_9"
type: "PReLU"
bottom: "ConvNd_13"
top: "PReLU_9"
prelu_param {
channel_shared: false
}
}
layer {
name: "Add_5"
type: "Eltwise"
bottom: "PReLU_9"
bottom: "PReLU_8"
top: "Add_5"
}
layer {
name: "BatchNorm_13"
type: "BatchNorm"
bottom: "Add_5"
top: "BatchNorm_13"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_14"
type: "Convolution"
bottom: "BatchNorm_13"
top: "ConvNd_14"
convolution_param {
num_output: 64
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "Add_6"
type: "Eltwise"
bottom: "Add_4"
bottom: "ConvNd_14"
top: "Add_6"
}
layer {
name: "BatchNorm_14"
type: "BatchNorm"
bottom: "Add_6"
top: "BatchNorm_14"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_15"
type: "Convolution"
bottom: "BatchNorm_14"
top: "ConvNd_15"
convolution_param {
num_output: 256
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_10"
type: "PReLU"
bottom: "ConvNd_15"
top: "PReLU_10"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_15"
type: "BatchNorm"
bottom: "PReLU_10"
top: "BatchNorm_15"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_16"
type: "Convolution"
bottom: "BatchNorm_15"
top: "ConvNd_16"
convolution_param {
num_output: 256
bias_term: true
pad: 1
kernel_size: 3
group: 256
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_11"
type: "PReLU"
bottom: "ConvNd_16"
top: "PReLU_11"
prelu_param {
channel_shared: false
}
}
layer {
name: "Add_7"
type: "Eltwise"
bottom: "PReLU_11"
bottom: "PReLU_10"
top: "Add_7"
}
layer {
name: "BatchNorm_16"
type: "BatchNorm"
bottom: "Add_7"
top: "BatchNorm_16"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_17"
type: "Convolution"
bottom: "BatchNorm_16"
top: "ConvNd_17"
convolution_param {
num_output: 64
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "Add_8"
type: "Eltwise"
bottom: "Add_6"
bottom: "ConvNd_17"
top: "Add_8"
}
layer {
name: "BatchNorm_17"
type: "BatchNorm"
bottom: "Add_8"
top: "BatchNorm_17"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_18"
type: "Convolution"
bottom: "BatchNorm_17"
top: "ConvNd_18"
convolution_param {
num_output: 256
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_12"
type: "PReLU"
bottom: "ConvNd_18"
top: "PReLU_12"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_18"
type: "BatchNorm"
bottom: "PReLU_12"
top: "BatchNorm_18"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
# Downsampling depthwise 3x3 stride-2 conv (group == num_output == 256):
# spatial size 28x28 -> 14x14. Transition into the 128-channel stage
# (the following bottlenecks widen to 512 expanded channels).
layer {
name: "ConvNd_19"
type: "Convolution"
bottom: "BatchNorm_18"
top: "ConvNd_19"
convolution_param {
num_output: 256
bias_term: true
pad: 1
kernel_size: 3
group: 256
stride: 2
dilation: 1
}
}
layer {
name: "PReLU_13"
type: "PReLU"
bottom: "ConvNd_19"
top: "PReLU_13"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_19"
type: "BatchNorm"
bottom: "PReLU_13"
top: "BatchNorm_19"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_20"
type: "Convolution"
bottom: "BatchNorm_19"
top: "ConvNd_20"
convolution_param {
num_output: 128
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "BatchNorm_20"
type: "BatchNorm"
bottom: "ConvNd_20"
top: "BatchNorm_20"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_21"
type: "Convolution"
bottom: "BatchNorm_20"
top: "ConvNd_21"
convolution_param {
num_output: 512
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_14"
type: "PReLU"
bottom: "ConvNd_21"
top: "PReLU_14"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_21"
type: "BatchNorm"
bottom: "PReLU_14"
top: "BatchNorm_21"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_22"
type: "Convolution"
bottom: "BatchNorm_21"
top: "ConvNd_22"
convolution_param {
num_output: 512
bias_term: true
pad: 1
kernel_size: 3
group: 512
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_15"
type: "PReLU"
bottom: "ConvNd_22"
top: "PReLU_15"
prelu_param {
channel_shared: false
}
}
layer {
name: "Add_9"
type: "Eltwise"
bottom: "PReLU_15"
bottom: "PReLU_14"
top: "Add_9"
}
layer {
name: "BatchNorm_22"
type: "BatchNorm"
bottom: "Add_9"
top: "BatchNorm_22"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_23"
type: "Convolution"
bottom: "BatchNorm_22"
top: "ConvNd_23"
convolution_param {
num_output: 128
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "Add_10"
type: "Eltwise"
bottom: "ConvNd_20"
bottom: "ConvNd_23"
top: "Add_10"
}
layer {
name: "BatchNorm_23"
type: "BatchNorm"
bottom: "Add_10"
top: "BatchNorm_23"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_24"
type: "Convolution"
bottom: "BatchNorm_23"
top: "ConvNd_24"
convolution_param {
num_output: 512
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_16"
type: "PReLU"
bottom: "ConvNd_24"
top: "PReLU_16"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_24"
type: "BatchNorm"
bottom: "PReLU_16"
top: "BatchNorm_24"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_25"
type: "Convolution"
bottom: "BatchNorm_24"
top: "ConvNd_25"
convolution_param {
num_output: 512
bias_term: true
pad: 1
kernel_size: 3
group: 512
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_17"
type: "PReLU"
bottom: "ConvNd_25"
top: "PReLU_17"
prelu_param {
channel_shared: false
}
}
layer {
name: "Add_11"
type: "Eltwise"
bottom: "PReLU_17"
bottom: "PReLU_16"
top: "Add_11"
}
layer {
name: "BatchNorm_25"
type: "BatchNorm"
bottom: "Add_11"
top: "BatchNorm_25"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_26"
type: "Convolution"
bottom: "BatchNorm_25"
top: "ConvNd_26"
convolution_param {
num_output: 128
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "Add_12"
type: "Eltwise"
bottom: "Add_10"
bottom: "ConvNd_26"
top: "Add_12"
}
layer {
name: "BatchNorm_26"
type: "BatchNorm"
bottom: "Add_12"
top: "BatchNorm_26"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_27"
type: "Convolution"
bottom: "BatchNorm_26"
top: "ConvNd_27"
convolution_param {
num_output: 512
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_18"
type: "PReLU"
bottom: "ConvNd_27"
top: "PReLU_18"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_27"
type: "BatchNorm"
bottom: "PReLU_18"
top: "BatchNorm_27"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_28"
type: "Convolution"
bottom: "BatchNorm_27"
top: "ConvNd_28"
convolution_param {
num_output: 512
bias_term: true
pad: 1
kernel_size: 3
group: 512
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_19"
type: "PReLU"
bottom: "ConvNd_28"
top: "PReLU_19"
prelu_param {
channel_shared: false
}
}
layer {
name: "Add_13"
type: "Eltwise"
bottom: "PReLU_19"
bottom: "PReLU_18"
top: "Add_13"
}
layer {
name: "BatchNorm_28"
type: "BatchNorm"
bottom: "Add_13"
top: "BatchNorm_28"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_29"
type: "Convolution"
bottom: "BatchNorm_28"
top: "ConvNd_29"
convolution_param {
num_output: 128
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "Add_14"
type: "Eltwise"
bottom: "Add_12"
bottom: "ConvNd_29"
top: "Add_14"
}
layer {
name: "BatchNorm_29"
type: "BatchNorm"
bottom: "Add_14"
top: "BatchNorm_29"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_30"
type: "Convolution"
bottom: "BatchNorm_29"
top: "ConvNd_30"
convolution_param {
num_output: 512
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_20"
type: "PReLU"
bottom: "ConvNd_30"
top: "PReLU_20"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_30"
type: "BatchNorm"
bottom: "PReLU_20"
top: "BatchNorm_30"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_31"
type: "Convolution"
bottom: "BatchNorm_30"
top: "ConvNd_31"
convolution_param {
num_output: 512
bias_term: true
pad: 1
kernel_size: 3
group: 512
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_21"
type: "PReLU"
bottom: "ConvNd_31"
top: "PReLU_21"
prelu_param {
channel_shared: false
}
}
layer {
name: "Add_15"
type: "Eltwise"
bottom: "PReLU_21"
bottom: "PReLU_20"
top: "Add_15"
}
layer {
name: "BatchNorm_31"
type: "BatchNorm"
bottom: "Add_15"
top: "BatchNorm_31"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_32"
type: "Convolution"
bottom: "BatchNorm_31"
top: "ConvNd_32"
convolution_param {
num_output: 128
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "Add_16"
type: "Eltwise"
bottom: "Add_14"
bottom: "ConvNd_32"
top: "Add_16"
}
layer {
name: "BatchNorm_32"
type: "BatchNorm"
bottom: "Add_16"
top: "BatchNorm_32"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_33"
type: "Convolution"
bottom: "BatchNorm_32"
top: "ConvNd_33"
convolution_param {
num_output: 512
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_22"
type: "PReLU"
bottom: "ConvNd_33"
top: "PReLU_22"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_33"
type: "BatchNorm"
bottom: "PReLU_22"
top: "BatchNorm_33"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_34"
type: "Convolution"
bottom: "BatchNorm_33"
top: "ConvNd_34"
convolution_param {
num_output: 512
bias_term: true
pad: 1
kernel_size: 3
group: 512
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_23"
type: "PReLU"
bottom: "ConvNd_34"
top: "PReLU_23"
prelu_param {
channel_shared: false
}
}
layer {
name: "Add_17"
type: "Eltwise"
bottom: "PReLU_23"
bottom: "PReLU_22"
top: "Add_17"
}
layer {
name: "BatchNorm_34"
type: "BatchNorm"
bottom: "Add_17"
top: "BatchNorm_34"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_35"
type: "Convolution"
bottom: "BatchNorm_34"
top: "ConvNd_35"
convolution_param {
num_output: 128
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "Add_18"
type: "Eltwise"
bottom: "Add_16"
bottom: "ConvNd_35"
top: "Add_18"
}
layer {
name: "BatchNorm_35"
type: "BatchNorm"
bottom: "Add_18"
top: "BatchNorm_35"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_36"
type: "Convolution"
bottom: "BatchNorm_35"
top: "ConvNd_36"
convolution_param {
num_output: 512
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_24"
type: "PReLU"
bottom: "ConvNd_36"
top: "PReLU_24"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_36"
type: "BatchNorm"
bottom: "PReLU_24"
top: "BatchNorm_36"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_37"
type: "Convolution"
bottom: "BatchNorm_36"
top: "ConvNd_37"
convolution_param {
num_output: 512
bias_term: true
pad: 1
kernel_size: 3
group: 512
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_25"
type: "PReLU"
bottom: "ConvNd_37"
top: "PReLU_25"
prelu_param {
channel_shared: false
}
}
layer {
name: "Add_19"
type: "Eltwise"
bottom: "PReLU_25"
bottom: "PReLU_24"
top: "Add_19"
}
layer {
name: "BatchNorm_37"
type: "BatchNorm"
bottom: "Add_19"
top: "BatchNorm_37"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_38"
type: "Convolution"
bottom: "BatchNorm_37"
top: "ConvNd_38"
convolution_param {
num_output: 128
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "Add_20"
type: "Eltwise"
bottom: "Add_18"
bottom: "ConvNd_38"
top: "Add_20"
}
layer {
name: "BatchNorm_38"
type: "BatchNorm"
bottom: "Add_20"
top: "BatchNorm_38"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_39"
type: "Convolution"
bottom: "BatchNorm_38"
top: "ConvNd_39"
convolution_param {
num_output: 256
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_26"
type: "PReLU"
bottom: "ConvNd_39"
top: "PReLU_26"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_39"
type: "BatchNorm"
bottom: "PReLU_26"
top: "BatchNorm_39"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
# Final downsampling depthwise 3x3 stride-2 conv (group == num_output == 256):
# spatial size 14x14 -> 7x7, feeding the last bottleneck stage before the head.
layer {
name: "ConvNd_40"
type: "Convolution"
bottom: "BatchNorm_39"
top: "ConvNd_40"
convolution_param {
num_output: 256
bias_term: true
pad: 1
kernel_size: 3
group: 256
stride: 2
dilation: 1
}
}
layer {
name: "PReLU_27"
type: "PReLU"
bottom: "ConvNd_40"
top: "PReLU_27"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_40"
type: "BatchNorm"
bottom: "PReLU_27"
top: "BatchNorm_40"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_41"
type: "Convolution"
bottom: "BatchNorm_40"
top: "ConvNd_41"
convolution_param {
num_output: 128
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "BatchNorm_41"
type: "BatchNorm"
bottom: "ConvNd_41"
top: "BatchNorm_41"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_42"
type: "Convolution"
bottom: "BatchNorm_41"
top: "ConvNd_42"
convolution_param {
num_output: 512
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_28"
type: "PReLU"
bottom: "ConvNd_42"
top: "PReLU_28"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_42"
type: "BatchNorm"
bottom: "PReLU_28"
top: "BatchNorm_42"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_43"
type: "Convolution"
bottom: "BatchNorm_42"
top: "ConvNd_43"
convolution_param {
num_output: 512
bias_term: true
pad: 1
kernel_size: 3
group: 512
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_29"
type: "PReLU"
bottom: "ConvNd_43"
top: "PReLU_29"
prelu_param {
channel_shared: false
}
}
layer {
name: "Add_21"
type: "Eltwise"
bottom: "PReLU_29"
bottom: "PReLU_28"
top: "Add_21"
}
layer {
name: "BatchNorm_43"
type: "BatchNorm"
bottom: "Add_21"
top: "BatchNorm_43"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_44"
type: "Convolution"
bottom: "BatchNorm_43"
top: "ConvNd_44"
convolution_param {
num_output: 128
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "Add_22"
type: "Eltwise"
bottom: "ConvNd_41"
bottom: "ConvNd_44"
top: "Add_22"
}
layer {
name: "BatchNorm_44"
type: "BatchNorm"
bottom: "Add_22"
top: "BatchNorm_44"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_45"
type: "Convolution"
bottom: "BatchNorm_44"
top: "ConvNd_45"
convolution_param {
num_output: 512
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_30"
type: "PReLU"
bottom: "ConvNd_45"
top: "PReLU_30"
prelu_param {
channel_shared: false
}
}
layer {
name: "BatchNorm_45"
type: "BatchNorm"
bottom: "PReLU_30"
top: "BatchNorm_45"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_46"
type: "Convolution"
bottom: "BatchNorm_45"
top: "ConvNd_46"
convolution_param {
num_output: 512
bias_term: true
pad: 1
kernel_size: 3
group: 512
stride: 1
dilation: 1
}
}
layer {
name: "PReLU_31"
type: "PReLU"
bottom: "ConvNd_46"
top: "PReLU_31"
prelu_param {
channel_shared: false
}
}
layer {
name: "Add_23"
type: "Eltwise"
bottom: "PReLU_31"
bottom: "PReLU_30"
top: "Add_23"
}
layer {
name: "BatchNorm_46"
type: "BatchNorm"
bottom: "Add_23"
top: "BatchNorm_46"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "ConvNd_47"
type: "Convolution"
bottom: "BatchNorm_46"
top: "ConvNd_47"
convolution_param {
num_output: 128
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "Add_24"
type: "Eltwise"
bottom: "Add_22"
bottom: "ConvNd_47"
top: "Add_24"
}
# Head: 1x1 conv expanding the 128-channel trunk to 512 channels (7x7 map)
# ahead of the global depthwise convolution (ConvNd_49). No bias; the
# following BatchNorm_47 + Scale pair supplies the affine transform.
layer {
name: "ConvNd_48"
type: "Convolution"
bottom: "Add_24"
top: "ConvNd_48"
convolution_param {
num_output: 512
bias_term: false
pad: 0
kernel_size: 1
group: 1
stride: 1
dilation: 1
}
}
layer {
name: "BatchNorm_47"
type: "BatchNorm"
bottom: "ConvNd_48"
top: "BatchNorm_47"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "BatchNorm_47_scale"
type: "Scale"
bottom: "BatchNorm_47"
top: "BatchNorm_47"
scale_param {
bias_term: true
}
}
layer {
name: "PReLU_32"
type: "PReLU"
bottom: "BatchNorm_47"
top: "PReLU_32"
prelu_param {
channel_shared: false
}
}
# Global depthwise convolution (GDC): 7x7 depthwise kernel with pad 0 on the
# 7x7 feature map collapses each of the 512 channels to a single value
# (output 512x1x1) -- a learned, per-channel replacement for global pooling.
layer {
name: "ConvNd_49"
type: "Convolution"
bottom: "PReLU_32"
top: "ConvNd_49"
convolution_param {
num_output: 512
bias_term: false
pad: 0
kernel_size: 7
group: 512
stride: 1
dilation: 1
}
}
layer {
name: "BatchNorm_48"
type: "BatchNorm"
bottom: "ConvNd_49"
top: "BatchNorm_48"
batch_norm_param {
use_global_stats: true
eps: 9.99999974738e-06
}
}
layer {
name: "BatchNorm_48_scale"
type: "Scale"
bottom: "BatchNorm_48"
top: "BatchNorm_48"
scale_param {
bias_term: true
}
}
layer {
name: "PReLU_33"
type: "PReLU"
bottom: "BatchNorm_48"
top: "PReLU_33"
prelu_param {
channel_shared: false
}
}
# Flatten the 512x1x1 GDC output into a 512-vector.
layer {
name: "View_1"
type: "Flatten"
bottom: "PReLU_33"
top: "View_1"
}
# Final fully-connected projection: 512 -> 128. "Addmm_1" is the 128-d face
# embedding produced by this deploy network.
layer {
name: "Addmm_1"
type: "InnerProduct"
bottom: "View_1"
top: "Addmm_1"
inner_product_param {
num_output: 128
bias_term: true
}
}
# (End of network definition; GitHub gist page footer commented out.)