@KaleidoZhouYN, created January 16, 2018 02:00
convFace-12
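# convFace-12: Caffe deploy definition of a 12-convolution face embedding network.
# Input is a single 3x112x112 image. Four stages of 64 / 128 / 256 / 512 channels;
# each stage opens with an unpadded stride-2 convolution (5x5 for conv1, 3x3 for
# conv2/3/4) followed by two padded 3x3 stride-1 convolutions, all bias-free and
# PReLU-activated. Under the standard Caffe output-size rule
# (in + 2*pad - kernel) / stride + 1, the feature map shrinks 112 -> 54 -> 26 -> 12 -> 5,
# and fc5 maps the final 5x5x512 volume to a 128-d embedding.
# Note: "normalize: true" inside convolution_param is not a field of stock Caffe's
# ConvolutionParameter, so this prototxt appears to target a customized Caffe fork
# that adds it.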
input: "data"
input_dim: 1
input_dim: 3
input_dim: 112
input_dim: 112
##################################
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 64
    kernel_size: 5
    stride: 2
    weight_filler {
      type: "xavier"
    }
    bias_term: false
    normalize: true
  }
}
layer {
  name: "relu1"
  type: "PReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "conv1_1"
  type: "Convolution"
  bottom: "conv1"
  top: "conv1_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 64
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "xavier"
    }
    bias_term: false
    normalize: true
  }
}
layer {
  name: "relu1_1"
  type: "PReLU"
  bottom: "conv1_1"
  top: "conv1_1"
}
layer {
  name: "conv1_2"
  type: "Convolution"
  bottom: "conv1_1"
  top: "conv1_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 64
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "xavier"
    }
    bias_term: false
    normalize: true
  }
}
layer {
  name: "relu1_2"
  type: "PReLU"
  bottom: "conv1_2"
  top: "conv1_2"
}
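# Stage 2: unpadded 3x3 stride-2 conv to 128 channels (54x54 -> 26x26), then two padded 3x3 convs.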
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "conv1_2"
  top: "conv2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 128
    kernel_size: 3
    stride: 2
    weight_filler {
      type: "xavier"
    }
    bias_term: false
    normalize: true
  }
}
layer {
  name: "relu2"
  type: "PReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "conv2_1"
  type: "Convolution"
  bottom: "conv2"
  top: "conv2_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 128
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "xavier"
    }
    bias_term: false
    normalize: true
  }
}
layer {
  name: "relu2_1"
  type: "PReLU"
  bottom: "conv2_1"
  top: "conv2_1"
}
layer {
  name: "conv2_2"
  type: "Convolution"
  bottom: "conv2_1"
  top: "conv2_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 128
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "xavier"
    }
    bias_term: false
    normalize: true
  }
}
layer {
  name: "relu2_2"
  type: "PReLU"
  bottom: "conv2_2"
  top: "conv2_2"
}
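# Stage 3: unpadded 3x3 stride-2 conv to 256 channels (26x26 -> 12x12), then two padded 3x3 convs.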
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "conv2_2"
  top: "conv3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 256
    kernel_size: 3
    stride: 2
    weight_filler {
      type: "xavier"
    }
    bias_term: false
    normalize: true
  }
}
layer {
  name: "relu3"
  type: "PReLU"
  bottom: "conv3"
  top: "conv3"
}
layer {
  name: "conv3_1"
  type: "Convolution"
  bottom: "conv3"
  top: "conv3_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 256
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "xavier"
    }
    bias_term: false
    normalize: true
  }
}
layer {
  name: "relu3_1"
  type: "PReLU"
  bottom: "conv3_1"
  top: "conv3_1"
}
layer {
  name: "conv3_2"
  type: "Convolution"
  bottom: "conv3_1"
  top: "conv3_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 256
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "xavier"
    }
    bias_term: false
    normalize: true
  }
}
layer {
  name: "relu3_2"
  type: "PReLU"
  bottom: "conv3_2"
  top: "conv3_2"
}
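# Stage 4: unpadded 3x3 stride-2 conv to 512 channels (12x12 -> 5x5), then two padded 3x3 convs.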
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv3_2"
  top: "conv4"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 512
    kernel_size: 3
    stride: 2
    weight_filler {
      type: "xavier"
    }
    bias_term: false
    normalize: true
  }
}
layer {
  name: "relu4"
  type: "PReLU"
  bottom: "conv4"
  top: "conv4"
}
layer {
  name: "conv4_1"
  type: "Convolution"
  bottom: "conv4"
  top: "conv4_1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 512
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "xavier"
    }
    bias_term: false
    normalize: true
  }
}
layer {
  name: "relu4_1"
  type: "PReLU"
  bottom: "conv4_1"
  top: "conv4_1"
}
layer {
  name: "conv4_2"
  type: "Convolution"
  bottom: "conv4_1"
  top: "conv4_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 512
    kernel_size: 3
    stride: 1
    pad: 1
    weight_filler {
      type: "xavier"
    }
    bias_term: false
    normalize: true
  }
}
layer {
  name: "relu4_2"
  type: "PReLU"
  bottom: "conv4_2"
  top: "conv4_2"
}
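# fc5: fully connected layer mapping the 5x5x512 feature map to the 128-d face embedding.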
layer {
  name: "fc5"
  type: "InnerProduct"
  bottom: "conv4_2"
  top: "fc5"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 128
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
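For reference, a minimal pycaffe sketch for extracting embeddings with this definition. It assumes the definition is saved as convFace-12.prototxt and that trained weights exist as convFace-12.caffemodel (neither file name comes from the gist), and that the Caffe build in use accepts the non-standard normalize field. Preprocessing (mean/scale, channel order, face alignment) is a placeholder and must match whatever was used in training.

import numpy as np
import caffe

# Hypothetical file names; the gist only provides the network definition.
DEPLOY_PROTOTXT = "convFace-12.prototxt"
WEIGHTS = "convFace-12.caffemodel"

caffe.set_mode_cpu()
net = caffe.Net(DEPLOY_PROTOTXT, WEIGHTS, caffe.TEST)

def embed(image_bgr):
    """Return the L2-normalized 128-d fc5 embedding for a 112x112 BGR face crop."""
    # HxWx3 uint8 -> 1x3x112x112 float blob (no mean subtraction shown;
    # apply the same preprocessing used at training time).
    blob = image_bgr.astype(np.float32).transpose(2, 0, 1)[np.newaxis, ...]
    net.blobs["data"].reshape(*blob.shape)
    net.blobs["data"].data[...] = blob
    net.forward()
    feat = net.blobs["fc5"].data[0].copy()
    return feat / (np.linalg.norm(feat) + 1e-10)

# Example: cosine similarity between two aligned face crops.
# sim = float(np.dot(embed(img_a), embed(img_b)))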