Created
June 30, 2017 10:07
-
-
Save ilkarman/09766b37841e6dccba52c932bf57cf29 to your computer and use it in GitHub Desktop.
Stochastic gradient descent (SGD) training loop for a neural network
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Train a neural network with mini-batch stochastic gradient descent.
#
# Args:
#   training_data:   list of training examples; shuffled each epoch.
#   epochs:          number of full passes over the training data.
#   mini_batch_size: number of examples per mini-batch.
#   lr:              learning rate, forwarded to update_mini_batch().
#   C:               cost/regularisation parameter, forwarded to update_mini_batch().
#   sizes:           layer sizes of the network, forwarded to update_mini_batch().
#   num_layers:      number of layers, forwarded to update_mini_batch().
#   biases:          initial bias vectors (updated in place across batches).
#   weights:         initial weight matrices (updated in place across batches).
#   verbose:         currently unused here; kept for interface compatibility.
#   validation_data: currently unused here; kept for interface compatibility.
#
# Returns:
#   A two-element list: [[1]] trained biases, [[2]] trained weights.
SGD <- function(training_data, epochs, mini_batch_size, lr, C, sizes, num_layers, biases, weights,
                verbose=FALSE, validation_data)
{
  # Every epoch
  for (j in seq_len(epochs)) {
    # Stochastic mini-batch (shuffle data each epoch)
    training_data <- sample(training_data)
    # Partition the shuffled set into mini-batches of mini_batch_size
    # (the last batch may be smaller if the data does not divide evenly)
    mini_batches <- split(training_data,
                          ceiling(seq_along(training_data) / mini_batch_size))
    # Feed forward (and back-propagate) each mini-batch in turn
    for (k in seq_along(mini_batches)) {
      # update_mini_batch() returns list(new_biases, new_weights)
      res <- update_mini_batch(mini_batches[[k]], lr, C, sizes, num_layers, biases, weights)
      biases <- res[[1]]
      # Fixed: was res[[-1]], which means "drop element 1" and only
      # coincidentally selects the weights for a length-2 list; it would
      # error for any longer result. res[[2]] is the explicit intent.
      weights <- res[[2]]
    }
  }
  # Return trained biases and weights
  list(biases, weights)
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment