Created
October 3, 2019 14:33
-
-
Save daviddalpiaz/fbe2a278b7f0d15077c63f44eb8c2adc to your computer and use it in GitHub Desktop.
train a neural network on MNIST in R using Keras
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# see here: https://keras.rstudio.com/
# code reproduced here for easy copy-pasting

# One-time setup: install the R bindings for TensorFlow, then the
# TensorFlow Python backend itself. Comment these out after the first run.
devtools::install_github("rstudio/tensorflow")
tensorflow::install_tensorflow()
# Load keras (also attaches the %>% pipe used below).
library(keras)

# MNIST: 60,000 training and 10,000 test images of handwritten
# digits, each a 28x28 grayscale array with an integer label 0-9.
mnist <- dataset_mnist()

# "Undo" the nested list returned by dataset_mnist() into
# separate feature (x) and label (y) arrays for train and test.
x_train <- mnist$train$x
y_train <- mnist$train$y
x_test <- mnist$test$x
y_test <- mnist$test$y
# Reshape: flatten each 28x28 image into a length-784 vector,
# since the first dense layer expects flat input.
x_train <- array_reshape(x_train, c(nrow(x_train), 784))
x_test <- array_reshape(x_test, c(nrow(x_test), 784))

# Rescale pixel intensities from [0, 255] to [0, 1].
x_train <- x_train / 255
x_test <- x_test / 255

# One-hot encode the labels: digit 0-9 -> row of 10 indicator
# columns, matching the 10-unit softmax output layer.
y_train <- to_categorical(y_train, 10)
y_test <- to_categorical(y_test, 10)
# Define the model: a fully-connected network with two hidden
# ReLU layers, dropout after each for regularization, and a
# 10-way softmax output over the digit classes.
model <- keras_model_sequential()
model %>%
  layer_dense(units = 256, activation = 'relu', input_shape = c(784)) %>%
  layer_dropout(rate = 0.4) %>%
  layer_dense(units = 128, activation = 'relu') %>%
  layer_dropout(rate = 0.3) %>%
  layer_dense(units = 10, activation = 'softmax')
# Compile the model (modifies `model` in place): categorical
# cross-entropy matches the one-hot labels + softmax output,
# RMSprop is a standard adaptive-learning-rate optimizer.
model %>% compile(
  loss = 'categorical_crossentropy',
  optimizer = optimizer_rmsprop(),
  metrics = c('accuracy')
)
# Train the model: 30 passes over the training data in
# mini-batches of 128, holding out 20% of the training set to
# monitor validation loss/accuracy each epoch.
history <- model %>% fit(
  x_train, y_train,
  epochs = 30, batch_size = 128,
  validation_split = 0.2
)
# Evaluate loss and accuracy on the held-out test set.
model %>% evaluate(x_test, y_test)

# Predicted digit class for each test image.
# NOTE: predict_classes() was removed from the Keras R interface
# (TensorFlow >= 2.6); take the argmax of the predicted class
# probabilities instead. `- 1L` maps R's 1-based which.max index
# back to the 0-9 digit labels.
probs <- model %>% predict(x_test)
apply(probs, 1, which.max) - 1L
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment