Skip to content

Instantly share code, notes, and snippets.

@kyrs kyrs/lstm.prototxt Secret
Last active Sep 20, 2016

Embed
What would you like to do?
# Caffe network definition (protobuf text format): token ids -> Embed -> LSTM
# -> InnerProduct -> 4-way softmax classifier.
name:"lstm_agm"
# TRAIN-phase input. MemoryData is filled in-memory by the caller (e.g. via a
# Python/C++ Reset call) rather than from an LMDB/LevelDB source.
# Produces "data" shaped 1000 x 100 x 1 x 1 and a "label" blob.
layer {
name: "inputl"
type: "MemoryData"
top: "data"
top: "label"
include {
phase: TRAIN
}
memory_data_param {
batch_size: 1000
# 100 values per sample; presumably a sequence of 100 token indices for the
# Embed layer below — TODO confirm against the code that fills this layer.
channels: 100
height: 1
width: 1
}
}
# TEST-phase input: identical geometry to the TRAIN MemoryData layer above.
# Reusing the same "data"/"label" top names is safe because the two layers are
# phase-exclusive (include { phase: ... }).
layer {
name: "inputlt"
type: "MemoryData"
top: "data"
top: "label"
include {
phase: TEST
}
memory_data_param {
batch_size: 1000
channels: 100
height: 1
width: 1
}
}
# Embedding lookup: treats each value in "data" as an integer token id and maps
# it to a dense 10-dimensional vector.
layer {
name: "ip0"
type: "Embed"
bottom: "data"
top: "ip0"
embed_param {
num_output: 10      # embedding dimension
input_dim: 28494    # vocabulary size; ids must lie in [0, 28493]
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"    # value defaults to 0
}
}
}
# LSTM over the embedded sequence, 64 hidden units, weights initialized
# uniformly in [-0.08, 0.08] (the common LSTM init from seq2seq practice).
# NOTE(review): Caffe's LSTM (RecurrentLayer) normally requires TWO bottoms:
# the input x shaped T x N x ... and a sequence-continuation indicator blob
# ("cont", shaped T x N). Only one bottom is wired here, and the MemoryData
# input is N x C x 1 x 1 rather than time-major — confirm this net actually
# initializes on the Caffe build it targets (or that a patched LSTM is used).
layer {
name: "lstm1"
type: "LSTM"
bottom: "ip0"
top: "lstm1"
recurrent_param {
num_output: 64
weight_filler {
type: "uniform"
min: -0.08
max: 0.08
}
bias_filler {
type: "constant"
value: 0
}
}
}
# Flatten the LSTM output to 2-D so the following InnerProduct sees one
# feature vector per row.
layer {
name: "re2"
type: "Reshape"
bottom: "lstm1"
top: "p1r"
reshape_param {
shape {
dim: 0    # 0 = copy this dimension from the input
dim: -1   # -1 = infer from the remaining element count
}
}
}
# Fully connected classifier head: projects the flattened LSTM features to
# 4 class scores (logits; softmax is applied by the phase-specific layers below).
layer {
name: "ip3"
type: "InnerProduct"
bottom: "p1r"
top: "ip3"
inner_product_param {
num_output: 4    # number of target classes
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"    # value defaults to 0
}
}
}
# TEST phase only: plain Softmax producing class probabilities for evaluation.
# Despite the name "losst", this computes no loss — it only normalizes the
# logits; accuracy/metrics are presumably computed by the caller.
layer {
name: "losst"
type: "Softmax"
bottom: "ip3"
top: "losst"
include {
phase: TEST
}
}
# TRAIN phase only: fused softmax + multinomial logistic loss against "label"
# (numerically more stable than a separate Softmax followed by a log loss).
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "ip3"
bottom: "label"
top: "loss"
include {
phase: TRAIN
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.