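# CNN deploy definition (Caffe prototxt): seven 3x3 convolution blocks with
# leaky ReLU activations and 2x2/stride-2 pooling, followed by a 7-way fully
# connected classifier. Expects a single 3-channel 224x224 input.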
name: "CNN"
input: "data"
input_shape {
dim: 1
dim: 3
dim: 224
dim: 224
}
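# Block 1: 3x3 conv (16 filters, pad 1, stride 1) + leaky ReLU
# (negative_slope: 0.1) + 2x2/stride-2 max pooling. Blocks 2-7 below repeat
# this pattern, doubling the filter count each time (32, 64, 128, 256, 512, 1024).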
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  convolution_param {
    num_output: 16
    kernel_size: 3
    pad: 1
    stride: 1
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
  relu_param {
    negative_slope: 0.1
  }
}
layer {
  name: "max_pool1"
  type: "Pooling"
  bottom: "conv1"
  top: "max_pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
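# Each 2x2/stride-2 pool halves the spatial resolution:
# 224 -> 112 -> 56 -> 28 -> 14 -> 7, and (since Caffe's pooling rounds partial
# windows up) 7 -> 4 -> 2, so the classifier sees a 1024x2x2 feature map.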
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "max_pool1"
  top: "conv2"
  convolution_param {
    num_output: 32
    kernel_size: 3
    pad: 1
    stride: 1
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
  relu_param {
    negative_slope: 0.1
  }
}
layer {
  name: "max_pool2"
  type: "Pooling"
  bottom: "conv2"
  top: "max_pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "max_pool2"
  top: "conv3"
  convolution_param {
    num_output: 64
    kernel_size: 3
    pad: 1
    stride: 1
  }
}
layer {
  name: "relu3"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
  relu_param {
    negative_slope: 0.1
  }
}
layer {
  name: "max_pool3"
  type: "Pooling"
  bottom: "conv3"
  top: "max_pool3"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "max_pool3"
  top: "conv4"
  convolution_param {
    num_output: 128
    kernel_size: 3
    pad: 1
    stride: 1
  }
}
layer {
  name: "relu4"
  type: "ReLU"
  bottom: "conv4"
  top: "conv4"
  relu_param {
    negative_slope: 0.1
  }
}
layer {
  name: "max_pool4"
  type: "Pooling"
  bottom: "conv4"
  top: "max_pool4"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "max_pool4"
  top: "conv5"
  convolution_param {
    num_output: 256
    kernel_size: 3
    pad: 1
    stride: 1
  }
}
layer {
  name: "relu5"
  type: "ReLU"
  bottom: "conv5"
  top: "conv5"
  relu_param {
    negative_slope: 0.1
  }
}
layer {
  name: "max_pool5"
  type: "Pooling"
  bottom: "conv5"
  top: "max_pool5"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv6"
  type: "Convolution"
  bottom: "max_pool5"
  top: "conv6"
  convolution_param {
    num_output: 512
    kernel_size: 3
    pad: 1
    stride: 1
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "conv6"
  top: "conv6"
  relu_param {
    negative_slope: 0.1
  }
}
layer {
  name: "max_pool6"
  type: "Pooling"
  bottom: "conv6"
  top: "max_pool6"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv7"
  type: "Convolution"
  bottom: "max_pool6"
  top: "conv7"
  convolution_param {
    num_output: 1024
    kernel_size: 3
    pad: 1
    stride: 1
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "conv7"
  top: "conv7"
  relu_param {
    negative_slope: 0.1
  }
}
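# The final block swaps max pooling for average pooling before the classifier.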
layer {
  name: "ave_pool7"
  type: "Pooling"
  bottom: "conv7"
  top: "ave_pool7"
  pooling_param {
    pool: AVE
    kernel_size: 2
    stride: 2
  }
}
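# 7-way classifier head: the InnerProduct layer maps the pooled 1024-channel
# features to 7 class scores (raw logits; no softmax layer is defined here).
# The Gaussian weight and constant bias fillers only matter if the network is
# initialized for training from this definition.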
layer {
  name: "Fully Connected"
  type: "InnerProduct"
  bottom: "ave_pool7"
  top: "result"
  inner_product_param {
    num_output: 7
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
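A minimal pycaffe usage sketch, assuming this definition is saved as
"cnn_deploy.prototxt" and that trained weights are available in a hypothetical
"cnn.caffemodel":

import numpy as np
import caffe

caffe.set_mode_cpu()

# Load the network in TEST phase; both file names are placeholders.
net = caffe.Net('cnn_deploy.prototxt', 'cnn.caffemodel', caffe.TEST)

# Fill the 1x3x224x224 "data" blob (random values here; a real caller would
# supply a preprocessed image in the layout the model was trained on).
net.blobs['data'].data[...] = np.random.rand(1, 3, 224, 224).astype(np.float32)

# forward() returns the output blobs; this net's only output is "result",
# a 1x7 vector of raw class scores (no softmax layer is defined).
out = net.forward()
scores = out['result'][0]
print('predicted class:', int(scores.argmax()))

Because only the architecture is defined here, the caffemodel must come from a
separate training run against a matching train/val prototxt with loss layers.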