import tensorflow as tf
import tensorflow.contrib.slim as slim

# relu_clip, n_hidden_1..n_hidden_6, and n_cell_dim are hyperparameters
# assumed to be defined at module level, as in the original gist.
def model(batch_x, seq_length, dropout):
    def clipped_relu(x):
        # ReLU clipped at relu_clip to keep activations bounded
        return tf.minimum(tf.nn.relu(x), relu_clip)

    with slim.arg_scope([slim.variable], device="/cpu:0"):
        with slim.arg_scope([slim.fully_connected], activation_fn=clipped_relu):
            with slim.arg_scope([slim.dropout], keep_prob=(1.0 - dropout)):
                # Three fully connected layers with clipped ReLU and dropout
                fc_1 = slim.dropout(slim.fully_connected(batch_x, n_hidden_1))
                fc_2 = slim.dropout(slim.fully_connected(fc_1, n_hidden_2))
                fc_3 = slim.dropout(slim.fully_connected(fc_2, n_hidden_3))

                # Bidirectional LSTM layer; forward and backward weights are
                # created in separate "fw"/"bw" variable scopes by
                # bidirectional_dynamic_rnn, so sharing the cell object is safe here
                lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(n_cell_dim, forget_bias=1.0, state_is_tuple=True)
                rnn_4, _ = tf.nn.bidirectional_dynamic_rnn(cell_fw=lstm_cell,
                                                           cell_bw=lstm_cell,
                                                           inputs=fc_3,
                                                           dtype=tf.float32,
                                                           sequence_length=seq_length)
                # Concatenate forward and backward outputs along the feature
                # axis (TF 0.12 argument order; TF >= 1.0 is tf.concat(rnn_4, 2))
                rnn_4 = tf.concat(2, rnn_4)

                fc_5 = slim.dropout(slim.fully_connected(rnn_4, n_hidden_5))

                # Output layer: override the arg_scope activation, since CTC
                # expects raw (unactivated) logits
                logits = slim.fully_connected(fc_5, n_hidden_6, activation_fn=None)

                # Reshape final layer to be time major as CTC expects
                logits = tf.transpose(logits, [1, 0, 2])

                return logits
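
For context, here is a minimal sketch of how this model function might be wired into a CTC training graph using TF 0.12-era APIs. The placeholder shapes, the feature dimension n_input, the dropout rate, and the learning rate are illustrative assumptions, not part of the gist.

# Minimal training-graph sketch (hypothetical shapes and hyperparameters)
n_input = 494  # assumed feature dimension (e.g. MFCCs x context window)

batch_x = tf.placeholder(tf.float32, [None, None, n_input])  # [batch, time, features]
seq_length = tf.placeholder(tf.int32, [None])                # per-example time steps
targets = tf.sparse_placeholder(tf.int32)                    # sparse label sequences

logits = model(batch_x, seq_length, dropout=0.05)            # dropout rate assumed

# model() already returns time-major logits, which ctc_loss expects
loss = tf.reduce_mean(tf.nn.ctc_loss(inputs=logits, labels=targets,
                                     sequence_length=seq_length))
train_op = tf.train.AdamOptimizer(1e-4).minimize(loss)       # learning rate assumed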