# Source: GitHub Gist by @alexpalman (last active May 5, 2018)
# Gist id: 0373d8bd31c1bb93265e0cbb10b6d056
name: "Modified ResNet-18"
input: "input"
input_dim: 1
input_dim: 1
input_dim: 96
input_dim: 64
layer {
bottom: "input"
top: "conv1-convolution"
name: "conv1-convolution"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 7
pad: 3
stride: 1
}
}
layer {
bottom: "conv1-convolution"
top: "conv1-bn"
name: "conv1-bn"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv1-bn"
top: "conv1-bn-scale"
name: "conv1-bn-scale"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv1-bn-scale"
top: "conv1-activation"
name: "conv1-activation"
type: "ReLU"
}
layer {
bottom: "conv1-activation"
top: "conv1-maxpool"
name: "conv1-maxpool"
type: "Pooling"
pooling_param {
kernel_size: 3
stride: 2
pool: MAX
}
}
# Conv layer 2
layer {
bottom: "conv1-maxpool"
top: "conv2-convolution2-1"
name: "conv2-convolution2-1"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "conv2-convolution2-1"
top: "conv2-bn2-1"
name: "conv2-bn2-1"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv2-bn2-1"
top: "conv2-bn-scale2-1"
name: "conv2-bn-scale2-1"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv1-maxpool"
top: "conv2-convolution2-2a"
name: "conv2-convolution2-2a"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv2-convolution2-2a"
top: "conv2-bn2-2a"
name: "conv2-bn2-2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv2-bn2-2a"
top: "conv2-bn-scale2-2a"
name: "conv2-bn-scale2-2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv2-bn-scale2-2a"
top: "conv2-activation2-2a"
name: "conv2-activation2-2a"
type: "ReLU"
}
layer {
bottom: "conv2-activation2-2a"
top: "conv2-convolution2-2b"
name: "conv2-convolution2-2b"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv2-convolution2-2b"
top: "conv2-bn2-2b"
name: "conv2-bn2-2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv2-bn2-2b"
top: "conv2-bn-scale2-2b"
name: "conv2-bn-scale2-2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv2-bn-scale2-1"
bottom: "conv2-bn-scale2-2b"
top: "conv2-shortcut2"
name: "conv2-shortcut2"
type: "Eltwise"
}
layer {
bottom: "conv2-shortcut2"
top: "conv2-activation"
name: "conv2-activation"
type: "ReLU"
}
# Id layer 2
layer {
bottom: "conv2-activation"
top: "conv2-id1-convolution2-2a"
name: "conv2-id1-convolution2-2a"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv2-id1-convolution2-2a"
top: "conv2-id1-bn2-2a"
name: "conv2-id1-bn2-2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv2-id1-bn2-2a"
top: "conv2-id1-bn-scale2-2a"
name: "conv2-id1-bn-scale2-2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv2-id1-bn-scale2-2a"
top: "conv2-id1-activation2-2a"
name: "conv2-id1-activation2-2a"
type: "ReLU"
}
layer {
bottom: "conv2-id1-activation2-2a"
top: "conv2-id1-convolution2-2b"
name: "conv2-id1-convolution2-2b"
type: "Convolution"
convolution_param {
num_output: 64
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv2-id1-convolution2-2b"
top: "conv2-id1-bn2-2b"
name: "conv2-id1-bn2-2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv2-id1-bn2-2b"
top: "conv2-id1-bn-scale2-2b"
name: "conv2-id1-bn-scale2-2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv2-activation"
bottom: "conv2-id1-bn-scale2-2b"
top: "conv2-id1-shortcut2"
name: "conv2-id1-shortcut2"
type: "Eltwise"
}
layer {
bottom: "conv2-id1-shortcut2"
top: "conv2-id1-activation"
name: "conv2-id1-activation"
type: "ReLU"
}
# Conv layer 3
layer {
bottom: "conv2-id1-activation"
top: "conv3-convolution3-1"
name: "conv3-convolution3-1"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 1
pad: 0
stride: 2
bias_term: false
}
}
layer {
bottom: "conv3-convolution3-1"
top: "conv3-bn3-1"
name: "conv3-bn3-1"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv3-bn3-1"
top: "conv3-bn-scale3-1"
name: "conv3-bn-scale3-1"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv2-id1-activation"
top: "conv3-convolution3-2a"
name: "conv3-convolution3-2a"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 3
pad: 1
stride: 2
bias_term: false
}
}
layer {
bottom: "conv3-convolution3-2a"
top: "conv3-bn3-2a"
name: "conv3-bn3-2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv3-bn3-2a"
top: "conv3-bn-scale3-2a"
name: "conv3-bn-scale3-2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv3-bn-scale3-2a"
top: "conv3-activation3-2a"
name: "conv3-activation3-2a"
type: "ReLU"
}
layer {
bottom: "conv3-activation3-2a"
top: "conv3-convolution3-2b"
name: "conv3-convolution3-2b"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv3-convolution3-2b"
top: "conv3-bn3-2b"
name: "conv3-bn3-2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv3-bn3-2b"
top: "conv3-bn-scale3-2b"
name: "conv3-bn-scale3-2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv3-bn-scale3-1"
bottom: "conv3-bn-scale3-2b"
top: "conv3-shortcut3"
name: "conv3-shortcut3"
type: "Eltwise"
}
layer {
bottom: "conv3-shortcut3"
top: "conv3-activation"
name: "conv3-activation"
type: "ReLU"
}
# Id layer 3
layer {
bottom: "conv3-activation"
top: "conv3-id1-convolution3-2a"
name: "conv3-id1-convolution3-2a"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv3-id1-convolution3-2a"
top: "conv3-id1-bn3-2a"
name: "conv3-id1-bn3-2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv3-id1-bn3-2a"
top: "conv3-id1-bn-scale3-2a"
name: "conv3-id1-bn-scale3-2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv3-id1-bn-scale3-2a"
top: "conv3-id1-activation3-2a"
name: "conv3-id1-activation3-2a"
type: "ReLU"
}
layer {
bottom: "conv3-id1-activation3-2a"
top: "conv3-id1-convolution3-2b"
name: "conv3-id1-convolution3-2b"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv3-id1-convolution3-2b"
top: "conv3-id1-bn3-2b"
name: "conv3-id1-bn3-2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv3-id1-bn3-2b"
top: "conv3-id1-bn-scale3-2b"
name: "conv3-id1-bn-scale3-2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv3-activation"
bottom: "conv3-id1-bn-scale3-2b"
top: "conv3-id1-shortcut3"
name: "conv3-id1-shortcut3"
type: "Eltwise"
}
layer {
bottom: "conv3-id1-shortcut3"
top: "conv3-id1-activation"
name: "conv3-id1-activation"
type: "ReLU"
}
# Conv layer 4
layer {
bottom: "conv3-id1-activation"
top: "conv4-convolution4-1"
name: "conv4-convolution4-1"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 1
pad: 0
stride: 2
bias_term: false
}
}
layer {
bottom: "conv4-convolution4-1"
top: "conv4-bn4-1"
name: "conv4-bn4-1"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv4-bn4-1"
top: "conv4-bn-scale4-1"
name: "conv4-bn-scale4-1"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv3-id1-activation"
top: "conv4-convolution4-2a"
name: "conv4-convolution4-2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 2
bias_term: false
}
}
layer {
bottom: "conv4-convolution4-2a"
top: "conv4-bn4-2a"
name: "conv4-bn4-2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv4-bn4-2a"
top: "conv4-bn-scale4-2a"
name: "conv4-bn-scale4-2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv4-bn-scale4-2a"
top: "conv4-activation4-2a"
name: "conv4-activation4-2a"
type: "ReLU"
}
layer {
bottom: "conv4-activation4-2a"
top: "conv4-convolution4-2b"
name: "conv4-convolution4-2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv4-convolution4-2b"
top: "conv4-bn4-2b"
name: "conv4-bn4-2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv4-bn4-2b"
top: "conv4-bn-scale4-2b"
name: "conv4-bn-scale4-2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv4-bn-scale4-1"
bottom: "conv4-bn-scale4-2b"
top: "conv4-shortcut4"
name: "conv4-shortcut4"
type: "Eltwise"
}
layer {
bottom: "conv4-shortcut4"
top: "conv4-activation"
name: "conv4-activation"
type: "ReLU"
}
# Id layer 4
layer {
bottom: "conv4-activation"
top: "conv4-id1-convolution4-2a"
name: "conv4-id1-convolution4-2a"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv4-id1-convolution4-2a"
top: "conv4-id1-bn4-2a"
name: "conv4-id1-bn4-2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv4-id1-bn4-2a"
top: "conv4-id1-bn-scale4-2a"
name: "conv4-id1-bn-scale4-2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv4-id1-bn-scale4-2a"
top: "conv4-id1-activation4-2a"
name: "conv4-id1-activation4-2a"
type: "ReLU"
}
layer {
bottom: "conv4-id1-activation4-2a"
top: "conv4-id1-convolution4-2b"
name: "conv4-id1-convolution4-2b"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv4-id1-convolution4-2b"
top: "conv4-id1-bn4-2b"
name: "conv4-id1-bn4-2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv4-id1-bn4-2b"
top: "conv4-id1-bn-scale4-2b"
name: "conv4-id1-bn-scale4-2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv4-activation"
bottom: "conv4-id1-bn-scale4-2b"
top: "conv4-id1-shortcut4"
name: "conv4-id1-shortcut4"
type: "Eltwise"
}
layer {
bottom: "conv4-id1-shortcut4"
top: "conv4-id1-activation"
name: "conv4-id1-activation"
type: "ReLU"
}
# Conv layer 5
layer {
bottom: "conv4-id1-activation"
top: "conv5-convolution5-1"
name: "conv5-convolution5-1"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 1
pad: 0
stride: 2
bias_term: false
}
}
layer {
bottom: "conv5-convolution5-1"
top: "conv5-bn5-1"
name: "conv5-bn5-1"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv5-bn5-1"
top: "conv5-bn-scale5-1"
name: "conv5-bn-scale5-1"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv4-id1-activation"
top: "conv5-convolution5-2a"
name: "conv5-convolution5-2a"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 3
pad: 1
stride: 2
bias_term: false
}
}
layer {
bottom: "conv5-convolution5-2a"
top: "conv5-bn5-2a"
name: "conv5-bn5-2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv5-bn5-2a"
top: "conv5-bn-scale5-2a"
name: "conv5-bn-scale5-2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv5-bn-scale5-2a"
top: "conv5-activation5-2a"
name: "conv5-activation5-2a"
type: "ReLU"
}
layer {
bottom: "conv5-activation5-2a"
top: "conv5-convolution5-2b"
name: "conv5-convolution5-2b"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv5-convolution5-2b"
top: "conv5-bn5-2b"
name: "conv5-bn5-2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv5-bn5-2b"
top: "conv5-bn-scale5-2b"
name: "conv5-bn-scale5-2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv5-bn-scale5-1"
bottom: "conv5-bn-scale5-2b"
top: "conv5-shortcut5"
name: "conv5-shortcut5"
type: "Eltwise"
}
layer {
bottom: "conv5-shortcut5"
top: "conv5-activation"
name: "conv5-activation"
type: "ReLU"
}
# Id layer 5
layer {
bottom: "conv5-activation"
top: "conv5-id1-convolution5-2a"
name: "conv5-id1-convolution5-2a"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv5-id1-convolution5-2a"
top: "conv5-id1-bn5-2a"
name: "conv5-id1-bn5-2a"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv5-id1-bn5-2a"
top: "conv5-id1-bn-scale5-2a"
name: "conv5-id1-bn-scale5-2a"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv5-id1-bn-scale5-2a"
top: "conv5-id1-activation5-2a"
name: "conv5-id1-activation5-2a"
type: "ReLU"
}
layer {
bottom: "conv5-id1-activation5-2a"
top: "conv5-id1-convolution5-2b"
name: "conv5-id1-convolution5-2b"
type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "conv5-id1-convolution5-2b"
top: "conv5-id1-bn5-2b"
name: "conv5-id1-bn5-2b"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "conv5-id1-bn5-2b"
top: "conv5-id1-bn-scale5-2b"
name: "conv5-id1-bn-scale5-2b"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "conv5-activation"
bottom: "conv5-id1-bn-scale5-2b"
top: "conv5-id1-shortcut5"
name: "conv5-id1-shortcut5"
type: "Eltwise"
}
layer {
bottom: "conv5-id1-shortcut5"
top: "conv5-id1-activation"
name: "conv5-id1-activation"
type: "ReLU"
}
# Final layers
layer {
bottom: "conv5-id1-activation"
top: "avgpool"
name: "avgpool"
type: "Pooling"
pooling_param {
kernel_h: 6
kernel_w: 4
stride: 1
pool: AVE
}
}
layer {
bottom: "avgpool"
top: "fc632"
name: "fc632"
type: "InnerProduct"
inner_product_param {
num_output: 632
}
}
layer {
bottom: "fc632"
top: "prob"
name: "prob"
type: "Sigmoid"
}
# End of network definition.