Deeplearn-rs MNIST graph
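Graph construction for a two-layer MLP on MNIST. The snippet below is a fragment; it assumes the deeplearn-rs crate and its GPU-array backend (`ga`) are in scope, plus the input dimensions it uses but never defines. The module paths here are a guess at the 2016-era deeplearn-rs layout, not verified against a specific version:

use std::rc::Rc;
use deeplearn::{init, layers, Graph};  // assumed paths into deeplearn-rs
use deeplearn::op::Relu;               // assumed location of the Relu op
// `ga` refers to the gpuarray backend used by deeplearn-rs (assumption)

let (rows, columns) = (28, 28);        // MNIST image dimensions (assumed; defined elsewhere in the full example)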
let ctx = Rc::new(ga::Context::new());
let ref mut graph = Graph::new(ctx.clone());
let batch_size = 10;

//////////////////////////
// Layer 1

// Input: batch_size samples of rows*columns inputs
let input = graph.add_variable(vec![batch_size, rows*columns], false, 0.0);

// Biased fully connected layer with 300 neurons
let (l1_fcb, _, _) = layers::dense_biased(graph, input, 300,
                                          init::Normal(0.001, 0.005),  // Weights initializer
                                          init::Normal(0.001, 0.005)); // Bias initializer
let l1_out = layers::activation(graph, Relu(l1_fcb));

//////////////////////////
// Layer 2

// Biased fully connected layer with 10 neurons
let (l2_fcb, _, _) = layers::dense_biased(graph, l1_out, 10,
                                          init::Normal(0.001, 0.005),  // Weights initializer
                                          init::Normal(0.001, 0.005)); // Bias initializer
let l2_out = layers::activation(graph, Relu(l2_fcb));

//////////////////////////
// Loss

let (loss_out, train_out) = layers::mse(graph, l2_out);
let loss_d = graph.add_gradient(loss_out); // Create a gradient to apply to the loss function